+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --add-peerdirs-tests all --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -A --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.CN6ltxXS7r --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest ydb --build-custom-json=/home/runner/actions_runner/_work/ydb/ydb/graph.json --custom-context=/home/runner/actions_runner/_work/ydb/ydb/context.json --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 2.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} | 1.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} | 1.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} | 2.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 2.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest | 3.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 3.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 2.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 2.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest | 2.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest | 2.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 3.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} | 3.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats | 4.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} | 4.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/mixedpy/tests/py3test | 4.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} | 5.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 5.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} | 5.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/node_broker/tests/py3test | 5.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} | 5.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ...
results_accumulator.log} | 5.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest | 5.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/viewer/tests/py3test | 5.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} | 5.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/test_connection/ut/unittest | 5.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} | 5.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} | 5.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/add_column/py3test | 5.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/tablet/ut/unittest | 6.1%| PREPARE $(VCS) | 7.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/common/py3test | 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a | 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a | 9.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |10.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/regexp/libcore-backup-regexp.a |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |11.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |12.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |13.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/threading/libcpp-yt-threading.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |13.6%| PREPARE $(BLACK-1616416287) - 8.40 MB |13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |14.1%| PREPARE $(RUFF-3583455953) - 8.40 MB |14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |14.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |14.9%| PREPARE $(FLAKE8_PY3-3596799299) - 23.54 MB |14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |16.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |19.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |19.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |19.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a 
|20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |21.0%| PREPARE $(TEST_TOOL_HOST-sbr:10669990147) - 38.05 MB |20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |20.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |21.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |21.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |21.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |22.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |22.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |23.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |22.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |22.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |22.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |22.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |22.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |22.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |22.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_file_download/libyt-lib-yt_file_download.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |23.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |23.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |24.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |25.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |25.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |25.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |25.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |26.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |26.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a 
|26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |28.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/temp_files/libyt-lib-temp_files.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_token_resolver/libyt-lib-yt_token_resolver.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |27.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |28.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |29.4%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |30.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |30.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |31.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |31.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |31.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |33.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/protos/receipt/libsqs_topic-protos-receipt.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |39.2%| PREPARE $(CLANG_FORMAT-3815817643) |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |40.3%| PREPARE $(YMAKE_PYTHON3-212672652) - 8.76 MB |40.4%| PREPARE $(YMAKE_PYTHON3) |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |41.0%| PREPARE $(LLD_ROOT-3107549726) - 33.57 MB |41.3%| PREPARE $(PYTHON) - 41.98 MB |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/holder/libsqs_topic-queue_url-holder.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |41.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |41.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |41.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |41.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |41.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |41.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |42.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |42.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |42.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |42.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |42.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |42.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |42.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |42.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |42.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |42.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |42.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |42.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |42.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |42.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |43.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |43.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |43.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.a |43.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |43.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |43.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |43.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |43.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |43.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |43.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |43.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |43.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |43.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |43.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |44.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |44.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |44.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |44.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |44.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |44.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |44.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |44.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |44.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |44.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |44.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |45.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |45.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |46.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/libbackup-import-protos.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |47.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |48.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |48.6%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |48.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |48.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |49.3%| PREPARE $(CLANG-1922233694) - 209.73 MB |49.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/counters_info/libydb-core-counters_info.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |50.0%| PREPARE $(CLANG16-1380963495) - 293.63 MB |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |50.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/cms/console/util/libcms-console-util.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/node_broker |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |50.6%| PREPARE $(JDK17-2548586558) - 176.18 MB |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |50.5%| PREPARE $(CLANG18-1866954364) - 302.02 MB |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |50.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |50.7%| PREPARE $(WITH_JDK17-sbr:9470949154) |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |50.9%| PREPARE $(JDK_DEFAULT-2548586558) |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |51.2%| PREPARE $(CLANG20-882982600) - 303.09 MB |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |51.1%| PREPARE $(WITH_JDK-sbr:9470949154) |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |51.4%| PREPARE $(CLANG-2403293607) |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/printer/libformats-arrow-printer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |52.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |52.4%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |52.4%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |52.3%| [CP] {default-linux-x86_64, release, asan} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |52.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |52.3%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |52.2%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ibdrv/libcontrib-libs-ibdrv.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |52.4%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan/libclang_rt.asan-x86_64.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |52.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |52.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |52.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |52.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/cq_actor/libinterconnect-rdma-cq_actor.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp 
|53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |53.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |54.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/attributes/libcore-ymq-attributes.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |54.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/hashes_checker/liblibrary-login-hashes_checker.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/sasl/liblibrary-login-sasl.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |54.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |54.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |55.0%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |55.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |55.5%| PREPARE $(GDB) - 23.09 MB |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |55.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |55.6%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |55.6%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |55.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |55.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |55.9%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/value/libpublic-lib-value.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/fulltext/libfulltext_udf.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/cow_string/libcpp-containers-cow_string.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |56.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/objcopy_2a9fba044b5f98d2ff5f5c7f44.o |56.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/libpy3node_broker.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |56.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snowball/libcontrib-libs-snowball.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |56.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |57.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |56.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |57.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/sysview/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/show_create_view |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload_kv |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |57.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.3%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/cdc |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/backup_stress |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |56.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |56.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |56.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |57.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |57.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.1%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |57.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/libpy3workload_mixed.global.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_c0a0299090f532c29f2ee6dc87.o |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e508a8abac843a0a0f92fc62eb.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |57.6%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/objcopy_9ccdc4f01b578a43bc35d4d519.o |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/libpy3workload_testshard.global.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/objcopy_06a563d04a0e897145ef1061d2.o |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/libpy3show_create_view.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e55498abceca534315a6428452.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_8e0f2cf91b35e6051ad82535a4.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/workload/libpy3stress-viewer-workload.global.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/workload/libpy3show_create-table-workload.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_1815f02732d96389c328f04d90.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |57.7%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_df04396057094f2483296b9dbe.o |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_f0d8fb718a757998dc9403df32.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/objcopy_33061e6ec0580baa7f93f1ce23.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/objcopy_691607a9cbabb8d8c0161d1a6d.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/libpy3workload_topic_kafka.global.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/libpy3workload_kv.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/sqs_json/libsqs_json.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/session/libcommands-interactive-session.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/models/libinteractive-ai-models.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/workload/libpy3stress-streaming-workload.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/libcommands-interactive-ai.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/libsqs_workload.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/objcopy_4f24d349611ecfcf19e3001adb.o |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |58.0%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/workload/libpy3stress-backup-workload.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-sqs/liblibs-aws-sdk-cpp-aws-cpp-sdk-sqs.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydbd/export/libapps-ydbd-export.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_f022524c80d73c5ff5d59211dd.o |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/statistics_internal/libstatistics_internal_udf.global.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_4d329553dae503930cad7a3eec.o |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_6dc907e08e2d6bd66a6c998081.o |58.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/tools/libinteractive-ai-tools.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |58.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/common/libcommands-interactive-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/libpy3backup_stress.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |58.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/objcopy_7d7339f4588397fc771e31030c.o |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/libpy3cdc.global.a |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/workload/type/libpy3result_set_format-workload-type.global.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_2650fc91e1b7439f48496ea51e.o |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_51c4be7ece033a38cc2a2dfb64.o |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/oom/py3test |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_ab542bc0d25a481768cec1ad31.o |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |58.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/result_set_format/workload/libpy3stress-result_set_format-workload.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/ctas |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/streaming |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/run_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/libpy3ctas.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/objcopy_3cb499a0fcc9aa014af2855233.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/workload/libpy3stress-ctas-workload.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |59.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/interface/libfile-metadata-interface.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/libpy3streaming.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/impl/libfile-upload-impl.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_771af70678639da3d434ea1416.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kafka/tests/py3test |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_2983697b65fe3fbc437e0acf1f.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_aac602e1ee9075b2856f1975ca.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/objcopy_1ac33161075322a9a0efb211c2.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/impl/libfile-metadata-impl.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/quota/ut/unittest |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/interface/libfile-upload-interface.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_preparer/interface/libfmr-job_preparer-interface.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_stats/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/solomon/py3test |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/fmr/job_preparer/impl/libfmr-job_preparer-impl.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_priority_provider.cpp 
|59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |59.6%| 
[TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_init/unittest |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_testshard/unittest |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_indexes/unittest |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/kafka/packet.cpp |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/replication/service/ut_json_change_record/unittest |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |60.1%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |60.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_11720a94357f4e62a79e34a420.o |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |60.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/column_table/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/federated_query/ut_service/fast/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_proxy/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/functional/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... 
grpc.pb.h} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/constrained_schema.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/comparator.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/provided.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest 
|60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_session/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... 
grpc.pb.h} |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_export/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/tests/sql/solomon/pytest |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/sql/py3test |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... 
grpc.pb.h} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/viewer/tests/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/list_topics/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |60.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |60.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| PREPARE $(FLAKE8_PY2-2255386470) - 8.50 MB |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_benches_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |60.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/checkpointing/ut/unittest |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/replication/unittest |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/olap_workload/tests/py3test |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/http/ut/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kv/tests/py3test |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serializable/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_generator.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |60.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |61.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_create.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |61.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest 
|61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_upload_rows/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/auth/gtest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/view/tests/py3test |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/row_table/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |59.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |59.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp
|59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp
|59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test (this entry repeats for each remaining ydb/tests/functional/audit/py3test chunk)
|59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a
|59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp
|59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp
|59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp
|59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp
|59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp
|59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp
|59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/mon_ut.cpp
|58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp
|58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a
|59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp
|59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp
|59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp
|59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff
|58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes
|58.7%| COMPACTING CACHE 112.0KiB
(the {BAZEL_DOWNLOAD, FAILED} entries below each repeat for every remaining test chunk of their suites)
|59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest
|59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator
|59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest
|59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest
|59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest
|59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest
|59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a
|59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp
|59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp
|59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test
|59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp
|59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp
|59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp
|59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a
|59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp
|59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp
|59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp
|59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon
|59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest
|60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest
|60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest
|59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o
|60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test
|60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest
|60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional
|60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest
|60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a
|60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest
|60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest
|60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp
|60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test
|60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp
|60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut
|60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test
|60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ...
results_accumulator.log} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/query_cache/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |60.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/gateway/ut/gtest |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/backup/tests/py3test |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_compaction/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_linear.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_narrow_map.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mutdict.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp 
|60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_sequence/unittest |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/s3_backups/tests/py3test |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |60.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/raw_socket/ut/unittest |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/public_http/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/s3_path_style/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceshard/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/time_cast/ut/unittest |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_object_storage_listing/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest 
|59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/grpc_streaming/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test 
|60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |61.3%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/wrappers/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/result_set_format/tests/py3test |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |61.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/long_tx_service/ut/unittest |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/example/py3test |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/cdc/tests/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/unittest |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/shard/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/coordinator/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/query_actor/ut/unittest |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/kqprun/tests/py3test |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/simple_queue/tests/py3test |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/rate_limiter/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |61.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.2%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |61.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... 
grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest 
|61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/auth/ydb-public-sdk-cpp-tests-integration-auth |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest 
|61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_tests/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_vacuum/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_disk_quotas/unittest |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |61.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |61.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_resource_pool/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/postgresql/py3test |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/fetcher/ut/unittest |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/limiter/grouped_memory/ut/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/tests/kikimr_tpch/unittest |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_svc/unittest |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/oltp_workload/tests/py3test |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... 
grpc.pb.h} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/view/unittest |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.8%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... 
grpc.pb.h} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.0%| RESOURCE $(sbr:4966407557) |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest 
|62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut/inside_ydb_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/executer_actor/ut/unittest |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |61.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/build_index/ut/unittest |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/splitter/ut/unittest |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |61.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |61.6%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_layers_integration.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytree/permission.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_2a98e2f0e66f286cb125620511.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_8cba80b2275265b72407436cdf.o |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.{pb.h ... 
grpc.pb.h} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |62.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/file_log_writer.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |62.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/configurable_singleton_def.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_priority_provider.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |62.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/dstool/py3test |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload_topic |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_rs/unittest |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/streaming/tests/py3test |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |62.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/describer/ut/unittest |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/high_load/unittest |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/table/tests/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/discovery/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/ut/unittest |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_followers/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/memory_controller/ut/unittest |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/wardens/py3test |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/perf/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest 
|62.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... 
grpc.pb.h} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... 
grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest 
|62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/health_check/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest 
|63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test 
|63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/sharding/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/load_test/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/generic/actors/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_c19b3eb5266bf8e49b2b628bc2.o |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_range_ops/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_reassign/unittest |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_keys/unittest |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/config/ut/unittest |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/rbo/objcopy_8a4839ce59e28719331c9dfe7e.o |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |63.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceproxy/ut/unittest |63.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/ctas/tests/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/mediator/ut/unittest |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/restarts/py3test |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_4e45fac9e6e2cbc502659b10eb.o |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |63.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |63.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/libpy3workload_topic.global.a |63.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/objcopy_e9d954952def1b899e1fb63731.o |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/dynamic.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_partitions.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_expand_map.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |63.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |63.4%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... 
grpc.pb.h} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_d3749b17b0bc2057695c3e736a.o |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |63.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |63.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |63.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |63.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_ee633aebbed65e39e09fbf4271.o |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_6bdc69403f0fa7c03edbf19c28.o |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/pq_async_io/ut/unittest |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/quoter/ut/unittest |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/show_create_table |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/minikql_compile/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/yc_search_ut/unittest |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_column_stats/unittest |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/http_api/py3test |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/yt/kqp_yt_import/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/mlp/ut/unittest |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/keyvalue/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/retrying_channel.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |63.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |63.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |63.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |63.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |63.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... 
grpc.pb.h} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |63.3%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |63.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... 
grpc.pb.h} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |63.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/libpy3show_create_table.global.a |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |63.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/pq_read/test/py3test |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic_kafka/tests/py3test |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_replication/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic/tests/py3test |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |63.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |63.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/plan2svg/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |63.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/encryption/py3test |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_background_compaction/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/objcopy_970514ee5aa7605a49b54b8feb.o |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |63.7%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |63.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/1bbc8f31f8eb46680aacf37392_raw.auxcpp |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_281dee3582d4ce59ddf75426c6.o |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_8d4997dfd93843149bbf2f76f1.o |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... 
grpc.pb.h} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest 
|63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |64.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_volatile/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/transfer |63.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/library/ut/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel_unstable/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/testshard_workload/tests/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/solomon/actors/ut/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/result_set_format |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/ut/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |64.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |64.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |64.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |64.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |63.9%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/transfer/tests/py3test |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |64.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_88c3bdc430d7cb7f217b6a7f5c.o |64.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/cluster_directory.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |64.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |64.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |64.2%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |64.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |64.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/libpy3result_set_format.global.a |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/objcopy_e0d6c3883613601e455029f31f.o |64.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/libpy3transfer.global.a |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/objcopy_b632f28ee823f938d14c0e85f9.o |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_1c931ae3496b883d009f58ef8e.o |64.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/objcopy_ab0068aecc73dfe57505e1cc57.o |64.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |64.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |64.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/libpy3kafka_streams_test.global.a |64.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |64.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... 
grpc.pb.h} |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_5233daad98e22a16d60b4b7b86.o |64.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |64.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |65.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |65.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |65.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |65.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |65.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |65.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |65.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |66.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |66.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |67.9%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |68.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |69.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |69.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |69.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |70.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |70.2%| [LD] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |70.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |71.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |72.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |72.8%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |72.9%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |72.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |73.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |73.9%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.4%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/kafka_streams_test |76.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/kafka_streams_test |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |76.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |76.7%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |76.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/backup_stress |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/node_broker |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/show_create_table |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/backup_stress |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/node_broker |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/node_broker |76.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/backup_stress |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload_mixed |76.9%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/show_create_table |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/show_create_table |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/workload_mixed |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/transfer |76.9%| 
[LD] {RESULT} $(B)/ydb/tests/stress/transfer/transfer |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/transfer |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload_topic |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/workload_topic |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload_topic |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.9%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.9%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |77.0%| [LD] {RESULT} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.0%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.0%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |77.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.0%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut
|77.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut
|77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|77.0%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|77.0%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut
|77.0%| [LD] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests
|77.1%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests
>> BlobDepotWithTestShard::PlainGroup [GOOD]
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests
|77.1%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests
|77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD]
|77.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_testshard/unittest
>> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD]
>> TParseParamsTests::CreateUser [GOOD]
>> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD]
>> TParseParamsTests::DeleteMessageBatchRequest [GOOD]
>> TParseParamsTests::MessageBody [GOOD]
>> TParseParamsTests::SendMessageBatchRequest [GOOD]
>> TParseParamsTests::DeleteQueueBatchRequest [GOOD]
>> TParseParamsTests::PurgeQueueBatchRequest [GOOD]
>> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD]
>> TParseParamsTests::UnnumberedAttribute [GOOD]
>> TParseParamsTests::UnnumberedAttributeName [GOOD]
>> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD]
>> TParseParamsTests::FailsOnInvalidGroupId [GOOD]
>> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD]
>> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD]
>> TParseParamsTests::FailsOnInvalidWaitTime [GOOD]
>> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD]
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|77.1%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|77.1%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|77.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
>> XmlBuilderTest::WritesProperly [GOOD]
>> XmlBuilderTest::MacroBuilder [GOOD]
|77.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest
|77.1%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/test_connection/ut/unittest
|77.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD]
|77.1%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/ut/unittest
>> GroupedMemoryLimiter::Simplest [GOOD]
>> GroupedMemoryLimiter::Simple [GOOD]
>> GroupedMemoryLimiter::CommonUsage [GOOD]
>> GroupedMemoryLimiter::Update [GOOD]
|77.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD]
|77.1%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/http/ut/unittest
|77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|77.1%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest
|77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|77.1%| [TS] {RESULT} ydb/core/ymq/ut/unittest
|77.1%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest
|77.1%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/limiter/grouped_memory/ut/unittest >> GroupedMemoryLimiter::Update [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=1;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=1;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=50;delta=50;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=2;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=2;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=3;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=1;wait=0;external_group_id=1;allocation_status=Allocated;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=1;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=2;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=3;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=3;wait=0;external_group_id=2;allocation_status=Allocated;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=3;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=2;wait=0;external_group_id=1;allocation_status=Allocated;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=2;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=NO_VALUE_OPTIONAL;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=50;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=4;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=4;stage=GLOBAL;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=10;delta=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=7;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=4;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=5;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=6;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=9;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=4000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=10;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=11;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=8;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=8;stage=GLOBAL; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:100;name=GLOBAL;event=update;usage=1000;waiting=10;allocated=1;from=1000;to=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=20;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=13;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=NO_VALUE_OPTIONAL; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=12;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |77.1%| [TS] {RESULT} ydb/core/tx/limiter/grouped_memory/ut/unittest |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |77.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/limiter/grouped_memory/ut/unittest |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |77.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |77.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.2%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens >> BlobDepot::BasicPutAndGet |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |77.2%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |77.2%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/s3_backups |77.3%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/s3_backups |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |77.4%| [LD] {RESULT} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/olap_workload |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/olap_workload |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |77.8%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.8%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |77.9%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.1%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |78.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/show_create_view |78.3%| [LD] {RESULT} 
$(B)/ydb/tests/stress/show_create/view/show_create_view |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/show_create_view >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> BlobDepot::BasicRange |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.6%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |78.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |78.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |78.8%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload_kv |79.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/workload_kv |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload_kv |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/streaming |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/streaming |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/streaming |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/cdc |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/cdc |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/cdc >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/ctas |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/ctas |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/ctas >> BlobDepot::BasicDiscover [GOOD] >> BlobDepot::BasicBlock |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.5%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |79.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |79.5%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage >> test.py::test[solomon-BadDownsamplingAggregation-] |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |79.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.7%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |79.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/result_set_format |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/result_set_format |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/result_set_format |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |79.7%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |79.8%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |79.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |79.8%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.8%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.8%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |79.8%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |79.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |79.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy |80.0%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> BlobDepot::VerifiedRandom [GOOD] >> BlobDepot::LoadPutAndRead |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |80.0%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.0%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.1%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.1%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |80.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |80.1%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> SelfHealActorTest::SingleErrorDisk [GOOD] >> BsControllerTest::SelfHealMirror3dc >> BsControllerTest::TestLocalSelfHeal >> BsControllerTest::SelfHealBlock4Plus2 >> BsControllerTest::DecommitRejected >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.2%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] >> BlobDepot::LoadPutAndRead [GOOD] >> BlobDepot::DecommitPutAndRead |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |80.2%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2026-01-10T11:55:13.540899Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-10T11:55:13.541052Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:13.541351Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-10T11:55:13.541382Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:13.541685Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-10T11:55:13.541819Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:13.541870Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-10T11:55:13.541893Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:13.542120Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-10T11:55:13.542145Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:13.542277Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-10T11:55:13.542300Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:13.542470Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-10T11:55:13.542492Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:13.542525Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-10T11:55:13.542548Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:13.543137Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-10T11:55:13.543179Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:13.543220Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-10T11:55:13.543242Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:13.543275Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-10T11:55:13.543296Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:13.543582Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-10T11:55:13.543607Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:13.543642Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-10T11:55:13.543663Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:13.543928Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-10T11:55:13.544052Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:13.544181Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-10T11:55:13.544206Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:13.564955Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2026-01-10T11:55:13.565712Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2026-01-10T11:55:13.565771Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2026-01-10T11:55:13.565810Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2026-01-10T11:55:13.565850Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 2026-01-10T11:55:13.565889Z 6 00h00m00.000000s 
:BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2026-01-10T11:55:13.565967Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2026-01-10T11:55:13.566010Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2026-01-10T11:55:13.566050Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2026-01-10T11:55:13.566089Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2026-01-10T11:55:13.566150Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2026-01-10T11:55:13.566194Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 2026-01-10T11:55:13.566248Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2026-01-10T11:55:13.566286Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# [14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2026-01-10T11:55:13.566327Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2026-01-10T11:55:13.627427Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:13.627970Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:13.628003Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:13.628192Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:13.628225Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:13.628258Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:13.629832Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:13.629870Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:13.630178Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:13.630214Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:13.630535Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:13.630569Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:13.630600Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:13.631145Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:13.631328Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:13.652563Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2026-01-10T11:55:13.653021Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2026-01-10T11:55:13.686725Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2026-01-10T11:55:13.686783Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2026-01-10T11:55:13.686829Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:591:21] Status# 
OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2026-01-10T11:55:13.686870Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2026-01-10T11:55:13.686909Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2026-01-10T11:55:13.686939Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2026-01-10T11:55:13.686975Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2026-01-10T11:55:13.686996Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2026-01-10T11:55:13.687034Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2026-01-10T11:55:13.687055Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2026-01-10T11:55:13.687084Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2026-01-10T11:55:13.687113Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2026-01-10T11:55:13.687177Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2026-01-10T11:55:13.687201Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2026-01-10T11:55:13.687237Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2026-01-10T11:55:13.687262Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2026-01-10T11:55:13.687300Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2026-01-10T11:55:13.687325Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2026-01-10T11:55:13.687363Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2026-01-10T11:55:13.687397Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2026-01-10T11:55:13.687435Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] PipeClient# [12:600:21] 2026-01-10T11:55:13.687457Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2026-01-10T11:55:13.687493Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2026-01-10T11:55:13.687525Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2026-01-10T11:55:13.687562Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2026-01-10T11:55:13.687581Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2026-01-10T11:55:13.687609Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2026-01-10T11:55:13.687629Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2026-01-10T11:55:13.690887Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] 
NodeServiceSetUpdate 2026-01-10T11:55:13.691243Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-10T11:55:13.759258Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2026-01-10T11:55:13.760377Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-01-10T11:55:13.760429Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-10T11:55:13.760483Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2026-01-10T11:55:13.760572Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-10T11:55:13.760597Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-10T11:55:13.760630Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2026-01-10T11:55:13.760699Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-01-10T11:55:13.760733Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-10T11:55:13.760775Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2026-01-10T11:55:13.760838Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-01-10T11:55:13.760862Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-01-10T11:55:13.760897Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2026-01-10T1 ... Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.857124Z 14 00h01m06.382536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to REPLICATING 2026-01-10T11:55:15.857905Z 1 00h01m06.382536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.858601Z 1 00h01m10.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.859576Z 1 00h01m14.207512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2026-01-10T11:55:15.860017Z 1 00h01m14.207512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.861848Z 10 00h01m17.675512s :BS_NODE DEBUG: [10] VDiskId# 
[80000001:1:0:0:0] status changed to READY 2026-01-10T11:55:15.862875Z 1 00h01m17.675512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.863541Z 12 00h01m18.468512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2026-01-10T11:55:15.863941Z 1 00h01m18.468512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.864330Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.864401Z 14 00h01m21.045536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2026-01-10T11:55:15.865571Z 1 00h01m21.045536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.866849Z 8 00h01m21.046048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:15.867106Z 8 00h01m21.046048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2026-01-10T11:55:15.867645Z 14 00h01m23.109512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2026-01-10T11:55:15.869035Z 1 00h01m23.109512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.869381Z 15 00h01m23.929512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2026-01-10T11:55:15.869826Z 1 00h01m23.929512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-01-10T11:55:15.870476Z 13 00h01m25.419024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2026-01-10T11:55:15.871743Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting 
GroupId# 2147483648 2026-01-10T11:55:15.874459Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.874495Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2026-01-10T11:55:15.875866Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.875888Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2026-01-10T11:55:15.877098Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877246Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2026-01-10T11:55:15.877377Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877396Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2026-01-10T11:55:15.877415Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877539Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2026-01-10T11:55:15.877675Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877693Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2026-01-10T11:55:15.877711Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877728Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2026-01-10T11:55:15.877746Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:15.877881Z 1 00h01m25.419024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2026-01-10T11:55:15.884450Z 1 00h01m25.419536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:15.884713Z 1 00h01m25.419536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2026-01-10T11:55:15.886854Z 1 00h01m25.419536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2026-01-10T11:55:15.886877Z 1 00h01m25.419536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 
2147483648 Success# true 2026-01-10T11:55:15.887929Z 7 00h01m25.419536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2026-01-10T11:55:15.887959Z 7 00h01m25.419536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2026-01-10T11:55:15.888136Z 2 00h01m25.419536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-01-10T11:55:15.888169Z 2 00h01m25.419536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2026-01-10T11:55:15.888450Z 3 00h01m25.419536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-10T11:55:15.888481Z 3 00h01m25.419536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2026-01-10T11:55:15.888933Z 4 00h01m25.419536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-01-10T11:55:15.889097Z 4 00h01m25.419536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2026-01-10T11:55:15.889145Z 5 00h01m25.419536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-01-10T11:55:15.889171Z 5 00h01m25.419536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2026-01-10T11:55:15.889326Z 6 00h01m25.419536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-10T11:55:15.889355Z 6 00h01m25.419536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2026-01-10T11:55:15.889613Z 9 00h01m25.419536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:15.889655Z 13 00h01m25.419536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-10T11:55:15.889897Z 13 00h01m25.419536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2026-01-10T11:55:15.890045Z 14 00h01m25.419536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-01-10T11:55:15.890070Z 14 00h01m25.419536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2026-01-10T11:55:15.890450Z 15 00h01m25.419536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:15.890477Z 15 00h01m25.419536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2026-01-10T11:55:15.890620Z 15 00h01m25.419536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2026-01-10T11:55:15.893828Z 15 00h01m27.024536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2026-01-10T11:55:15.895113Z 3 00h01m27.065512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2026-01-10T11:55:15.899642Z 13 00h01m36.083512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2026-01-10T11:55:15.901666Z 11 00h01m37.137512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2026-01-10T11:55:15.904281Z 2 00h01m40.032512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2026-01-10T11:55:15.908055Z 15 00h01m43.101536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2026-01-10T11:55:15.912342Z 9 00h01m43.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:15.912522Z 9 00h01m43.102048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |80.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |80.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.3%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |80.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool |80.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.3%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select >> BsControllerTest::TestLocalSelfHeal [GOOD] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.4%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2026-01-10T11:55:13.361433Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-10T11:55:13.361491Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:13.361589Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-10T11:55:13.361614Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:13.361659Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-10T11:55:13.361681Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:13.361720Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-10T11:55:13.361742Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:13.361774Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-10T11:55:13.361794Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:13.361842Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-10T11:55:13.361863Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:13.361896Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-10T11:55:13.361928Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:13.361977Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-10T11:55:13.362002Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:13.362050Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-10T11:55:13.362073Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:13.362106Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-10T11:55:13.362127Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:13.362159Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-10T11:55:13.362181Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:13.362263Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-10T11:55:13.362296Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:13.362335Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-10T11:55:13.362357Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:13.362397Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-10T11:55:13.362419Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:13.362460Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-10T11:55:13.362496Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:13.362533Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-10T11:55:13.362554Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-10T11:55:13.362596Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 
2026-01-10T11:55:13.362617Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-10T11:55:13.362654Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-10T11:55:13.362674Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-10T11:55:13.362708Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-10T11:55:13.362729Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-10T11:55:13.362768Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-10T11:55:13.362789Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-10T11:55:13.362828Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-10T11:55:13.362849Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-10T11:55:13.362883Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-10T11:55:13.362906Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-10T11:55:13.362949Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-10T11:55:13.362985Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-10T11:55:13.363027Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-10T11:55:13.363048Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-10T11:55:13.363080Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-10T11:55:13.363105Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-10T11:55:13.363138Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-10T11:55:13.363178Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-10T11:55:13.363211Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-10T11:55:13.363239Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-10T11:55:13.363291Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-10T11:55:13.363314Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-10T11:55:13.363351Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-10T11:55:13.363372Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-10T11:55:13.363419Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-10T11:55:13.363442Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-10T11:55:13.363484Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-10T11:55:13.363515Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-10T11:55:13.363554Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-10T11:55:13.363575Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-10T11:55:13.363609Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-10T11:55:13.363630Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-10T11:55:13.363686Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-10T11:55:13.363708Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-10T11:55:13.363743Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-10T11:55:13.363769Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-10T11:55:13.363815Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-10T11:55:13.363836Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-10T11:55:13.390693Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-10T11:55:13.393315Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-10T11:55:13.393370Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# 
[0:0:0] PipeClient# [3:2721:41] 2026-01-10T11:55:13.393512Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-10T11:55:13.393658Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-10T11:55:13.393691Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-10T11:55:13.393724Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-01-10T11:55:13.393762Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-10T11:55:13.393880Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-01-10T11:55:13.393985Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-10T11:55:13.394022Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-10T11:55:13.394054Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-10T11:55:13.394205Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-10T11:55:13.394655Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-10T11:55:13.394689Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-10T11:55:13.394869Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-10T11:55:13.394905Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-10T11:55:13.395073Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-10T11:55:13.395109Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-10T11:55:13.395140Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-10T11:55:13.395186Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-10T11:55:13.395469Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 
2026-01-10T11:55:13.395515Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-10T11:55:13.395548Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-10T11:55:13.395580Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-10T11:55:13.395611Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-10T11:55:13.395795Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-10T11:55:13.395831Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-01-10T11:55:13.396282Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-10T11:55:13.396466Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-10T11:55:13.396647Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-10T11:55:13.396683Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-10T11:55:13.396716Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-10T11:55:13.396877Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-10T11:55:13.397258Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
rue Replicated# true 2026-01-10T11:55:17.282544Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483756 VDiskId# [8000006c:1:2:1:0] DiskIsOk# true 2026-01-10T11:55:17.282586Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483756 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.282622Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483756 VDiskId# [8000006c:1:2:2:0] DiskIsOk# true 2026-01-10T11:55:17.288977Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483756 Items# [8000006c:1:1:0:0]: 15:1001:1006 -> 15:1000:1010 ConfigTxSeqNo# 48 2026-01-10T11:55:17.289030Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483756 Success# true 2026-01-10T11:55:17.289207Z 18 00h05m00.105120s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-10T11:55:17.289300Z 18 00h05m00.105120s :BS_NODE DEBUG: [18] VDiskId# [8000006c:1:1:1:0] -> [8000006c:2:1:1:0] 2026-01-10T11:55:17.289401Z 3 00h05m00.105120s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-10T11:55:17.289446Z 3 00h05m00.105120s :BS_NODE DEBUG: [3] VDiskId# [8000006c:1:0:0:0] -> [8000006c:2:0:0:0] 2026-01-10T11:55:17.289542Z 21 00h05m00.105120s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-10T11:55:17.289592Z 21 00h05m00.105120s :BS_NODE DEBUG: [21] VDiskId# [8000006c:1:1:2:0] -> [8000006c:2:1:2:0] 2026-01-10T11:55:17.289695Z 6 00h05m00.105120s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-10T11:55:17.289757Z 6 00h05m00.105120s :BS_NODE DEBUG: [6] VDiskId# [8000006c:1:0:1:0] -> [8000006c:2:0:1:0] 2026-01-10T11:55:17.289858Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:17.289908Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] VDiskId# [8000006c:1:0:2:0] -> [8000006c:2:0:2:0] 2026-01-10T11:55:17.289997Z 27 00h05m00.105120s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2026-01-10T11:55:17.290054Z 27 00h05m00.105120s :BS_NODE DEBUG: [27] VDiskId# [8000006c:1:2:0:0] -> [8000006c:2:2:0:0] 2026-01-10T11:55:17.290136Z 30 00h05m00.105120s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2026-01-10T11:55:17.290185Z 30 00h05m00.105120s :BS_NODE DEBUG: [30] VDiskId# [8000006c:1:2:1:0] -> [8000006c:2:2:1:0] 2026-01-10T11:55:17.290297Z 15 00h05m00.105120s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.290339Z 15 00h05m00.105120s :BS_NODE DEBUG: [15] VDiskId# [8000006c:2:1:0:0] PDiskId# 1000 VSlotId# 1010 created 2026-01-10T11:55:17.290417Z 15 00h05m00.105120s :BS_NODE DEBUG: [15] VDiskId# [8000006c:2:1:0:0] status changed to INIT_PENDING 2026-01-10T11:55:17.290514Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-10T11:55:17.290562Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] VDiskId# [8000006c:1:2:2:0] -> [8000006c:2:2:2:0] 2026-01-10T11:55:17.290872Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483740 2026-01-10T11:55:17.291990Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292055Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:0:0:0] DiskIsOk# true 2026-01-10T11:55:17.292123Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} 
Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292154Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:0:1:0] DiskIsOk# true 2026-01-10T11:55:17.292186Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292218Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:0:2:0] DiskIsOk# true 2026-01-10T11:55:17.292250Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292279Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:1:1:0] DiskIsOk# true 2026-01-10T11:55:17.292309Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292353Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:1:2:0] DiskIsOk# true 2026-01-10T11:55:17.292404Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292439Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:2:0:0] DiskIsOk# true 2026-01-10T11:55:17.292471Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292498Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:2:1:0] DiskIsOk# true 2026-01-10T11:55:17.292528Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483740 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:17.292570Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483740 VDiskId# [8000005c:1:2:2:0] DiskIsOk# true 2026-01-10T11:55:17.298945Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483740 Items# [8000005c:1:1:0:0]: 15:1001:1005 -> 15:1002:1010 ConfigTxSeqNo# 49 2026-01-10T11:55:17.299002Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483740 Success# true 2026-01-10T11:55:17.299273Z 18 00h05m00.105632s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2026-01-10T11:55:17.299346Z 18 00h05m00.105632s :BS_NODE DEBUG: [18] VDiskId# [8000005c:1:1:1:0] -> [8000005c:2:1:1:0] 2026-01-10T11:55:17.299451Z 3 00h05m00.105632s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-10T11:55:17.299500Z 3 00h05m00.105632s :BS_NODE DEBUG: [3] VDiskId# [8000005c:1:0:0:0] -> [8000005c:2:0:0:0] 2026-01-10T11:55:17.299628Z 21 00h05m00.105632s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-10T11:55:17.299681Z 21 00h05m00.105632s :BS_NODE DEBUG: [21] VDiskId# [8000005c:1:1:2:0] -> [8000005c:2:1:2:0] 2026-01-10T11:55:17.299766Z 6 00h05m00.105632s 
:BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-10T11:55:17.299824Z 6 00h05m00.105632s :BS_NODE DEBUG: [6] VDiskId# [8000005c:1:0:1:0] -> [8000005c:2:0:1:0] 2026-01-10T11:55:17.299923Z 9 00h05m00.105632s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:17.299980Z 9 00h05m00.105632s :BS_NODE DEBUG: [9] VDiskId# [8000005c:1:0:2:0] -> [8000005c:2:0:2:0] 2026-01-10T11:55:17.300115Z 27 00h05m00.105632s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2026-01-10T11:55:17.300166Z 27 00h05m00.105632s :BS_NODE DEBUG: [27] VDiskId# [8000005c:1:2:0:0] -> [8000005c:2:2:0:0] 2026-01-10T11:55:17.300283Z 30 00h05m00.105632s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2026-01-10T11:55:17.300334Z 30 00h05m00.105632s :BS_NODE DEBUG: [30] VDiskId# [8000005c:1:2:1:0] -> [8000005c:2:2:1:0] 2026-01-10T11:55:17.300447Z 15 00h05m00.105632s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.300496Z 15 00h05m00.105632s :BS_NODE DEBUG: [15] VDiskId# [8000005c:2:1:0:0] PDiskId# 1002 VSlotId# 1010 created 2026-01-10T11:55:17.300572Z 15 00h05m00.105632s :BS_NODE DEBUG: [15] VDiskId# [8000005c:2:1:0:0] status changed to INIT_PENDING 2026-01-10T11:55:17.300673Z 33 00h05m00.105632s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-10T11:55:17.300724Z 33 00h05m00.105632s :BS_NODE DEBUG: [33] VDiskId# [8000005c:1:2:2:0] -> [8000005c:2:2:2:0] 2026-01-10T11:55:17.301872Z 15 00h05m01.286048s :BS_NODE DEBUG: [15] VDiskId# [8000004c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.302835Z 15 00h05m01.297608s :BS_NODE DEBUG: [15] VDiskId# [8000007c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.303727Z 15 00h05m03.337632s :BS_NODE DEBUG: [15] VDiskId# [8000005c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.304650Z 15 00h05m03.545584s :BS_NODE DEBUG: [15] VDiskId# [8000001c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.305525Z 15 00h05m04.940560s :BS_NODE DEBUG: [15] VDiskId# [8000003c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.307372Z 15 00h05m05.151120s :BS_NODE DEBUG: [15] VDiskId# [8000006c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.308223Z 15 00h05m05.618096s :BS_NODE DEBUG: [15] VDiskId# [8000000c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.309088Z 15 00h05m05.753072s :BS_NODE DEBUG: [15] VDiskId# [8000002c:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:17.310331Z 15 00h05m11.893608s :BS_NODE DEBUG: [15] VDiskId# [8000007c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.311841Z 15 00h05m11.894120s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.311919Z 15 00h05m11.894120s :BS_NODE DEBUG: [15] VDiskId# [8000007c:1:1:0:0] destroyed 2026-01-10T11:55:17.313455Z 15 00h05m20.972096s :BS_NODE DEBUG: [15] VDiskId# [8000000c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.314960Z 15 00h05m20.972608s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.315016Z 15 00h05m20.972608s :BS_NODE DEBUG: [15] VDiskId# [8000000c:1:1:0:0] destroyed 2026-01-10T11:55:17.315225Z 15 00h05m22.266584s :BS_NODE DEBUG: [15] VDiskId# [8000001c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.316658Z 15 00h05m22.267096s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.316717Z 15 00h05m22.267096s :BS_NODE DEBUG: [15] VDiskId# [8000001c:1:1:0:0] destroyed 2026-01-10T11:55:17.316895Z 15 00h05m24.976120s :BS_NODE DEBUG: [15] VDiskId# [8000006c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.318417Z 15 00h05m24.976632s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.318467Z 15 
00h05m24.976632s :BS_NODE DEBUG: [15] VDiskId# [8000006c:1:1:0:0] destroyed 2026-01-10T11:55:17.318971Z 15 00h05m25.622048s :BS_NODE DEBUG: [15] VDiskId# [8000004c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.320402Z 15 00h05m25.622560s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.320460Z 15 00h05m25.622560s :BS_NODE DEBUG: [15] VDiskId# [8000004c:1:1:0:0] destroyed 2026-01-10T11:55:17.320610Z 15 00h05m26.413560s :BS_NODE DEBUG: [15] VDiskId# [8000003c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.322430Z 15 00h05m26.414072s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.322604Z 15 00h05m26.414072s :BS_NODE DEBUG: [15] VDiskId# [8000003c:1:1:0:0] destroyed 2026-01-10T11:55:17.323215Z 15 00h05m29.585072s :BS_NODE DEBUG: [15] VDiskId# [8000002c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.325668Z 15 00h05m29.585584s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.325709Z 15 00h05m29.585584s :BS_NODE DEBUG: [15] VDiskId# [8000002c:1:1:0:0] destroyed 2026-01-10T11:55:17.326968Z 15 00h05m30.910632s :BS_NODE DEBUG: [15] VDiskId# [8000005c:2:1:0:0] status changed to READY 2026-01-10T11:55:17.329013Z 15 00h05m30.911144s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:17.329054Z 15 00h05m30.911144s :BS_NODE DEBUG: [15] VDiskId# [8000005c:1:1:0:0] destroyed |80.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics >> BlobDepot::DecommitPutAndRead [GOOD] >> BlobDepot::DecommitVerifiedRandom |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> BsControllerTest::TestLocalBrokenRelocation [GOOD] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |80.5%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2026-01-10T11:55:16.319952Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-10T11:55:16.319993Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:16.320053Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-10T11:55:16.320070Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:16.320122Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-10T11:55:16.320136Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:16.320162Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-10T11:55:16.320174Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:16.320194Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-10T11:55:16.320207Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:16.320226Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-10T11:55:16.320240Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:16.320270Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-10T11:55:16.320293Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:16.320317Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-10T11:55:16.320334Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:16.320364Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-10T11:55:16.320379Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:16.320441Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-10T11:55:16.320455Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:16.320476Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-10T11:55:16.320487Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:16.320507Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-10T11:55:16.320530Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:16.320555Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-10T11:55:16.320568Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:16.320591Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-10T11:55:16.320604Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:16.320632Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-10T11:55:16.320653Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:16.320679Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-10T11:55:16.320692Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-10T11:55:16.320719Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-10T11:55:16.320732Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-10T11:55:16.320750Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-10T11:55:16.320763Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-10T11:55:16.320786Z 19 00h00m00.000000s 
:BS_NODE DEBUG: [19] Bootstrap 2026-01-10T11:55:16.320797Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-10T11:55:16.320819Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-10T11:55:16.320831Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-10T11:55:16.320853Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-10T11:55:16.320865Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-10T11:55:16.320886Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-10T11:55:16.320898Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-10T11:55:16.320928Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-10T11:55:16.320952Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-10T11:55:16.320976Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-10T11:55:16.320989Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-10T11:55:16.321010Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-10T11:55:16.321025Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-10T11:55:16.321058Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-10T11:55:16.321072Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-10T11:55:16.321092Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-10T11:55:16.321106Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-10T11:55:16.321128Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-10T11:55:16.321140Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-10T11:55:16.321162Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-10T11:55:16.321175Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-10T11:55:16.321203Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-10T11:55:16.321217Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-10T11:55:16.321244Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-10T11:55:16.321264Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-10T11:55:16.321290Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-10T11:55:16.321303Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-10T11:55:16.321322Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-10T11:55:16.321358Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-10T11:55:16.321385Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-10T11:55:16.321399Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-10T11:55:16.321421Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-10T11:55:16.321436Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-10T11:55:16.321468Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-10T11:55:16.321482Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-10T11:55:16.335888Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-10T11:55:16.336953Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-10T11:55:16.336995Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-01-10T11:55:16.337025Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-10T11:55:16.337054Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] 
ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-10T11:55:16.337078Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-10T11:55:16.337103Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-01-10T11:55:16.337126Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-10T11:55:16.337150Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-01-10T11:55:16.337176Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-10T11:55:16.337200Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-10T11:55:16.337223Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-10T11:55:16.337252Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-10T11:55:16.337307Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-10T11:55:16.337332Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-10T11:55:16.337366Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-10T11:55:16.337395Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-10T11:55:16.337427Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-10T11:55:16.337455Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-10T11:55:16.337479Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-10T11:55:16.337512Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-10T11:55:16.337548Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-01-10T11:55:16.337579Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-10T11:55:16.337603Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] 
Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-10T11:55:16.337629Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-10T11:55:16.337654Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-10T11:55:16.337689Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-10T11:55:16.337720Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-01-10T11:55:16.337756Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-10T11:55:16.337795Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-10T11:55:16.337832Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-10T11:55:16.337858Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-10T11:55:16.337881Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-10T11:55:16.337906Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-10T11:55:16.337946Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2026-01-10T11:55:24.723338Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2026-01-10T11:55:24.723384Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2026-01-10T11:55:24.723424Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2026-01-10T11:55:24.723459Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2026-01-10T11:55:24.724179Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-10T11:55:24.724252Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2026-01-10T11:55:24.724299Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2026-01-10T11:55:24.724355Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2026-01-10T11:55:24.724407Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2026-01-10T11:55:24.724451Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2026-01-10T11:55:24.724492Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2026-01-10T11:55:24.724536Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2026-01-10T11:55:24.724591Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2026-01-10T11:55:24.724635Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2026-01-10T11:55:24.724688Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2026-01-10T11:55:24.724740Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2026-01-10T11:55:24.724782Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2026-01-10T11:55:24.724826Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2026-01-10T11:55:24.724870Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2026-01-10T11:55:24.724919Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2026-01-10T11:55:24.724978Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2026-01-10T11:55:24.725692Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2026-01-10T11:55:24.725767Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2026-01-10T11:55:24.725820Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2026-01-10T11:55:24.725864Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2026-01-10T11:55:24.725906Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2026-01-10T11:55:24.725962Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2026-01-10T11:55:24.726017Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2026-01-10T11:55:24.726061Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2026-01-10T11:55:24.726116Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2026-01-10T11:55:24.726162Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2026-01-10T11:55:24.726212Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2026-01-10T11:55:24.726255Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2026-01-10T11:55:24.726725Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2026-01-10T11:55:24.726789Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2026-01-10T11:55:24.726857Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2026-01-10T11:55:24.726905Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2026-01-10T11:55:24.726968Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2026-01-10T11:55:24.727017Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2026-01-10T11:55:24.727071Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2026-01-10T11:55:24.727153Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2026-01-10T11:55:24.727203Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2026-01-10T11:55:24.730217Z 2 01h25m01.569560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.730744Z 2 01h25m01.930560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.731193Z 7 01h25m01.971560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2026-01-10T11:55:24.731665Z 7 01h25m01.975560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2026-01-10T11:55:24.732035Z 5 01h25m02.279560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.732516Z 4 01h25m02.426560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.732949Z 7 01h25m03.054560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2026-01-10T11:55:24.733363Z 8 01h25m03.365560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2026-01-10T11:55:24.733768Z 4 01h25m03.775560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.734119Z 5 01h25m03.866560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.734466Z 10 01h25m04.213560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2026-01-10T11:55:24.734857Z 10 01h25m04.713560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2026-01-10T11:55:24.735334Z 10 01h25m04.735560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2026-01-10T11:55:24.735717Z 4 01h25m04.888560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.737695Z 7 01h25m05.635560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2026-01-10T11:55:24.738179Z 4 01h25m06.086560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2026-01-10T11:55:24.738930Z 2 01h25m11.782560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] 
status changed to READY 2026-01-10T11:55:24.739940Z 1 01h25m11.783072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.740013Z 1 01h25m11.783072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2026-01-10T11:55:24.741008Z 4 01h25m16.131560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2026-01-10T11:55:24.741861Z 1 01h25m16.132072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.741918Z 1 01h25m16.132072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2026-01-10T11:55:24.742071Z 4 01h25m17.026560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2026-01-10T11:55:24.742968Z 1 01h25m17.027072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.743027Z 1 01h25m17.027072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2026-01-10T11:55:24.743156Z 2 01h25m18.772560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2026-01-10T11:55:24.743931Z 1 01h25m18.773072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.743971Z 1 01h25m18.773072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2026-01-10T11:55:24.744423Z 10 01h25m22.952560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2026-01-10T11:55:24.745254Z 1 01h25m22.953072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.745302Z 1 01h25m22.953072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2026-01-10T11:55:24.745670Z 7 01h25m26.535560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2026-01-10T11:55:24.746496Z 1 01h25m26.536072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.746561Z 1 01h25m26.536072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2026-01-10T11:55:24.746993Z 5 01h25m28.973560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2026-01-10T11:55:24.747680Z 1 01h25m28.974072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.747725Z 1 01h25m28.974072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2026-01-10T11:55:24.747828Z 7 01h25m29.961560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2026-01-10T11:55:24.748545Z 1 01h25m29.962072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.748589Z 1 01h25m29.962072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2026-01-10T11:55:24.749508Z 8 01h25m30.694560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2026-01-10T11:55:24.750318Z 1 01h25m30.695072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.750364Z 1 01h25m30.695072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2026-01-10T11:55:24.750761Z 7 01h25m32.010560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2026-01-10T11:55:24.751474Z 1 01h25m32.011072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.751518Z 1 01h25m32.011072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2026-01-10T11:55:24.751853Z 4 01h25m32.750560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2026-01-10T11:55:24.752569Z 1 01h25m32.751072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.752616Z 1 01h25m32.751072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2026-01-10T11:55:24.752722Z 4 01h25m33.284560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2026-01-10T11:55:24.753510Z 1 01h25m33.285072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2026-01-10T11:55:24.753556Z 1 01h25m33.285072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2026-01-10T11:55:24.753623Z 7 01h25m33.568560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2026-01-10T11:55:24.754413Z 1 01h25m33.569072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.754461Z 1 01h25m33.569072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2026-01-10T11:55:24.754876Z 5 01h25m34.508560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2026-01-10T11:55:24.755603Z 1 01h25m34.509072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.755655Z 1 01h25m34.509072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2026-01-10T11:55:24.756287Z 10 01h25m35.492560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2026-01-10T11:55:24.757049Z 1 01h25m35.493072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.757116Z 1 01h25m35.493072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2026-01-10T11:55:24.757515Z 10 01h25m38.590560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2026-01-10T11:55:24.758201Z 1 01h25m38.591072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-01-10T11:55:24.758247Z 1 01h25m38.591072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> BSCRestartPDisk::RestartOneByOne >> BSCRestartPDisk::RestartNotAllowed >> BSCRestartPDisk::RestartOneByOneWithReconnects |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |80.5%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> VDiskBalancing::TestStopOneNode_Mirror3dc >> VDiskBalancing::TestRandom_Block42 >> VDiskBalancing::TestStopOneNode_Block42 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> VDiskBalancing::TestRandom_Mirror3dc >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> BlobDepot::DecommitVerifiedRandom [GOOD] >> BlobDepot::CheckIntegrity |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2026-01-10T11:55:13.610857Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-10T11:55:13.610924Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:13.611013Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-10T11:55:13.611059Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:13.611108Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-10T11:55:13.611132Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:13.611187Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-01-10T11:55:13.611224Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:13.611264Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-10T11:55:13.611289Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:13.611335Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-10T11:55:13.611360Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:13.611406Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-10T11:55:13.611433Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:13.611476Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-10T11:55:13.611500Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:13.611542Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-10T11:55:13.611565Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:13.611611Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-10T11:55:13.611649Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:13.611691Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-10T11:55:13.611717Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:13.611760Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-10T11:55:13.611784Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:13.611833Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-10T11:55:13.611859Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:13.611903Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-10T11:55:13.611929Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:13.612002Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-10T11:55:13.612030Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:13.612099Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-10T11:55:13.612124Z 16 00h00m00.000000s 
:BS_NODE DEBUG: [16] Connect 2026-01-10T11:55:13.612161Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-10T11:55:13.612185Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-10T11:55:13.612220Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-10T11:55:13.612256Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-10T11:55:13.612317Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-10T11:55:13.612345Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-10T11:55:13.612382Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-10T11:55:13.612406Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-10T11:55:13.612441Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-10T11:55:13.612465Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-10T11:55:13.612506Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-10T11:55:13.612531Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-10T11:55:13.612567Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-10T11:55:13.612591Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-10T11:55:13.612628Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-10T11:55:13.612651Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-10T11:55:13.612699Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-10T11:55:13.612733Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-10T11:55:13.612778Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-10T11:55:13.612815Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-10T11:55:13.612855Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-01-10T11:55:13.612881Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-10T11:55:13.612927Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-10T11:55:13.612951Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-10T11:55:13.612995Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-10T11:55:13.613020Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-10T11:55:13.613058Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-10T11:55:13.613082Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-10T11:55:13.613124Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-10T11:55:13.613147Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-10T11:55:13.613202Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-10T11:55:13.613230Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-10T11:55:13.724018Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2163:55] Status# ERROR ClientId# [1:2163:55] ServerId# [0:0:0] PipeClient# [1:2163:55] 2026-01-10T11:55:13.726207Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2164:37] Status# ERROR ClientId# [2:2164:37] ServerId# [0:0:0] PipeClient# [2:2164:37] 2026-01-10T11:55:13.726267Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2165:37] Status# ERROR ClientId# [3:2165:37] ServerId# [0:0:0] PipeClient# [3:2165:37] 2026-01-10T11:55:13.726429Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2166:37] Status# ERROR ClientId# [4:2166:37] ServerId# [0:0:0] PipeClient# [4:2166:37] 2026-01-10T11:55:13.726470Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2167:37] Status# ERROR ClientId# [5:2167:37] ServerId# [0:0:0] PipeClient# [5:2167:37] 2026-01-10T11:55:13.726512Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2168:37] Status# ERROR 
ClientId# [6:2168:37] ServerId# [0:0:0] PipeClient# [6:2168:37] 2026-01-10T11:55:13.726545Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2169:37] Status# ERROR ClientId# [7:2169:37] ServerId# [0:0:0] PipeClient# [7:2169:37] 2026-01-10T11:55:13.726583Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2170:37] Status# ERROR ClientId# [8:2170:37] ServerId# [0:0:0] PipeClient# [8:2170:37] 2026-01-10T11:55:13.726687Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2171:37] Status# ERROR ClientId# [9:2171:37] ServerId# [0:0:0] PipeClient# [9:2171:37] 2026-01-10T11:55:13.726730Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2172:37] Status# ERROR ClientId# [10:2172:37] ServerId# [0:0:0] PipeClient# [10:2172:37] 2026-01-10T11:55:13.726824Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2173:37] Status# ERROR ClientId# [11:2173:37] ServerId# [0:0:0] PipeClient# [11:2173:37] 2026-01-10T11:55:13.726918Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2174:37] Status# ERROR ClientId# [12:2174:37] ServerId# [0:0:0] PipeClient# [12:2174:37] 2026-01-10T11:55:13.727003Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2175:37] Status# ERROR ClientId# [13:2175:37] ServerId# [0:0:0] PipeClient# [13:2175:37] 2026-01-10T11:55:13.727056Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2176:37] Status# ERROR ClientId# [14:2176:37] ServerId# [0:0:0] PipeClient# [14:2176:37] 2026-01-10T11:55:13.727094Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2177:37] Status# ERROR ClientId# [15:2177:37] ServerId# [0:0:0] PipeClient# [15:2177:37] 2026-01-10T11:55:13.727131Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2178:37] Status# ERROR ClientId# [16:2178:37] ServerId# [0:0:0] PipeClient# [16:2178:37] 2026-01-10T11:55:13.727187Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2179:37] Status# ERROR ClientId# [17:2179:37] ServerId# [0:0:0] PipeClient# [17:2179:37] 2026-01-10T11:55:13.727243Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2180:37] Status# ERROR ClientId# [18:2180:37] ServerId# [0:0:0] PipeClient# [18:2180:37] 2026-01-10T11:55:13.727280Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2181:37] Status# ERROR ClientId# [19:2181:37] ServerId# [0:0:0] PipeClient# [19:2181:37] 2026-01-10T11:55:13.727316Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2182:37] Status# ERROR ClientId# [20:2182:37] ServerId# [0:0:0] PipeClient# [20:2182:37] 2026-01-10T11:55:13.727525Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2183:37] Status# ERROR ClientId# [21:2183:37] ServerId# [0:0:0] PipeClient# [21:2183:37] 2026-01-10T11:55:13.727573Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2184:37] Status# ERROR ClientId# [22:2184:37] ServerId# [0:0:0] PipeClient# [22:2184:37] 2026-01-10T11:55:13.727610Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2185:37] Status# ERROR ClientId# [23:2185:37] ServerId# [0:0:0] PipeClient# [23:2185:37] 2026-01-10T11:55:13.727652Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2186:37] Status# ERROR ClientId# [24:2186:37] ServerId# [0:0:0] PipeClient# [24:2186:37] 2026-01-10T11:55:13.727791Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2187:37] Status# ERROR ClientId# [25:2187:37] ServerId# 
[0:0:0] PipeClient# [25:2187:37] 2026-01-10T11:55:13.728101Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2188:37] Status# ERROR ClientId# [26:2188:37] ServerId# [0:0:0] PipeClient# [26:2188:37] 2026-01-10T11:55:13.728306Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2189:37] Status# ERROR ClientId# [27:2189:37] ServerId# [0:0:0] PipeClient# [27:2189:37] 2026-01-10T11:55:13.728360Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2190:37] Status# ERROR ClientId# [28:2190:37] ServerId# [0:0:0] PipeClient# [28:2190:37] 2026-01-10T11:55:13.728403Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2191:37] Status# ERROR ClientId# [29:2191:37] ServerId# [0:0:0] PipeClient# [29:2191:37] 2026-01-10T11:55:13.728752Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2192:37] Status# ERROR ClientId# [30:2192:37] ServerId# [0:0:0] PipeClient# [30:2192:37] 2026-01-10T11:55:13.728790Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2193:37] Status# ERROR ClientId# [31:2193:37] ServerId# [0:0:0] PipeClient# [31:2193:37] 2026-01-10T11:55:13.728833Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2194:37] Status# ERROR ClientId# [32:2194:37] ServerId# [0:0:0] PipeClient# [32:2194:37] 2026-01-10T11:55:14.269073Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.418924s 2026-01-10T11:55:14.269780Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.419668s 2026-01-10T11:55:14.307247Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2264:79] expected 1 current 0 2026-01-10T11:55:14.307319Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2265:38] expected 1 current 0 2026-01-10T11:55:14.307348Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2266:38] expected 1 current 0 2026-01-10T11:55:14.307376Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2267:38] expected 1 current 0 2026-01-10T11:55:14.307402Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2268:38] expected 1 current 0 2026-01-10T11:55:14.307429Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2269:38] expected 1 current 0 2026-01-10T11:55:14.307456Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [ ... 
015:5:0:4:0] DiskIsOk# true 2026-01-10T11:55:28.879543Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483669 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.879568Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483669 VDiskId# [80000015:5:0:5:0] DiskIsOk# true 2026-01-10T11:55:28.879594Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483669 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.879616Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483669 VDiskId# [80000015:5:0:6:0] DiskIsOk# true 2026-01-10T11:55:28.882924Z 1 05h15m00.121504s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483669 Items# [80000015:5:0:7:0]: 8:1000:1013 -> 17:1000:1013 ConfigTxSeqNo# 501 2026-01-10T11:55:28.882959Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483669 Success# true 2026-01-10T11:55:28.883076Z 17 05h15m00.121504s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-10T11:55:28.883118Z 17 05h15m00.121504s :BS_NODE DEBUG: [17] VDiskId# [80000015:6:0:7:0] PDiskId# 1000 VSlotId# 1013 created 2026-01-10T11:55:28.883171Z 17 05h15m00.121504s :BS_NODE DEBUG: [17] VDiskId# [80000015:6:0:7:0] status changed to INIT_PENDING 2026-01-10T11:55:28.883256Z 6 05h15m00.121504s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-01-10T11:55:28.883302Z 6 05h15m00.121504s :BS_NODE DEBUG: [6] VDiskId# [80000015:5:0:2:0] -> [80000015:6:0:2:0] 2026-01-10T11:55:28.883358Z 8 05h15m00.121504s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.883425Z 9 05h15m00.121504s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:28.883468Z 9 05h15m00.121504s :BS_NODE DEBUG: [9] VDiskId# [80000015:5:0:0:0] -> [80000015:6:0:0:0] 2026-01-10T11:55:28.883534Z 10 05h15m00.121504s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2026-01-10T11:55:28.883573Z 10 05h15m00.121504s :BS_NODE DEBUG: [10] VDiskId# [80000015:5:0:1:0] -> [80000015:6:0:1:0] 2026-01-10T11:55:28.883639Z 12 05h15m00.121504s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-01-10T11:55:28.883676Z 12 05h15m00.121504s :BS_NODE DEBUG: [12] VDiskId# [80000015:5:0:3:0] -> [80000015:6:0:3:0] 2026-01-10T11:55:28.883742Z 13 05h15m00.121504s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-10T11:55:28.883782Z 13 05h15m00.121504s :BS_NODE DEBUG: [13] VDiskId# [80000015:5:0:4:0] -> [80000015:6:0:4:0] 2026-01-10T11:55:28.883846Z 31 05h15m00.121504s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2026-01-10T11:55:28.883889Z 31 05h15m00.121504s :BS_NODE DEBUG: [31] VDiskId# [80000015:5:0:5:0] -> [80000015:6:0:5:0] 2026-01-10T11:55:28.883964Z 14 05h15m00.121504s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-01-10T11:55:28.884008Z 14 05h15m00.121504s :BS_NODE DEBUG: [14] VDiskId# [80000015:5:0:6:0] -> [80000015:6:0:6:0] 2026-01-10T11:55:28.884287Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483705 2026-01-10T11:55:28.885101Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885152Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:0:0] DiskIsOk# true 
2026-01-10T11:55:28.885192Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885220Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:1:0] DiskIsOk# true 2026-01-10T11:55:28.885250Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885279Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:2:0] DiskIsOk# true 2026-01-10T11:55:28.885310Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885341Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:3:0] DiskIsOk# true 2026-01-10T11:55:28.885374Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885404Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:4:0] DiskIsOk# true 2026-01-10T11:55:28.885436Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885465Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:6:0] DiskIsOk# true 2026-01-10T11:55:28.885491Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:28.885519Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:3:0:7:0] DiskIsOk# true 2026-01-10T11:55:28.888905Z 1 05h15m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483705 Items# [80000039:3:0:5:0]: 8:1000:1015 -> 17:1000:1014 ConfigTxSeqNo# 502 2026-01-10T11:55:28.888942Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483705 Success# true 2026-01-10T11:55:28.889059Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-10T11:55:28.889100Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] VDiskId# [80000039:4:0:5:0] PDiskId# 1000 VSlotId# 1014 created 2026-01-10T11:55:28.889152Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] VDiskId# [80000039:4:0:5:0] status changed to INIT_PENDING 2026-01-10T11:55:28.889236Z 23 05h15m00.122016s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:28.889283Z 23 05h15m00.122016s :BS_NODE DEBUG: [23] VDiskId# [80000039:3:0:2:0] -> [80000039:4:0:2:0] 2026-01-10T11:55:28.889337Z 8 05h15m00.122016s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.889402Z 9 05h15m00.122016s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:28.889441Z 9 05h15m00.122016s :BS_NODE DEBUG: [9] VDiskId# [80000039:3:0:0:0] -> [80000039:4:0:0:0] 2026-01-10T11:55:28.889507Z 10 05h15m00.122016s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 
2026-01-10T11:55:28.889547Z 10 05h15m00.122016s :BS_NODE DEBUG: [10] VDiskId# [80000039:3:0:1:0] -> [80000039:4:0:1:0] 2026-01-10T11:55:28.889611Z 12 05h15m00.122016s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-01-10T11:55:28.889648Z 12 05h15m00.122016s :BS_NODE DEBUG: [12] VDiskId# [80000039:3:0:3:0] -> [80000039:4:0:3:0] 2026-01-10T11:55:28.889711Z 13 05h15m00.122016s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-10T11:55:28.889751Z 13 05h15m00.122016s :BS_NODE DEBUG: [13] VDiskId# [80000039:3:0:4:0] -> [80000039:4:0:4:0] 2026-01-10T11:55:28.889815Z 15 05h15m00.122016s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-01-10T11:55:28.889853Z 15 05h15m00.122016s :BS_NODE DEBUG: [15] VDiskId# [80000039:3:0:6:0] -> [80000039:4:0:6:0] 2026-01-10T11:55:28.889921Z 16 05h15m00.122016s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2026-01-10T11:55:28.889960Z 16 05h15m00.122016s :BS_NODE DEBUG: [16] VDiskId# [80000039:3:0:7:0] -> [80000039:4:0:7:0] 2026-01-10T11:55:28.890702Z 17 05h15m01.274504s :BS_NODE DEBUG: [17] VDiskId# [80000015:6:0:7:0] status changed to REPLICATING 2026-01-10T11:55:28.891050Z 17 05h15m01.320016s :BS_NODE DEBUG: [17] VDiskId# [80000039:4:0:5:0] status changed to REPLICATING 2026-01-10T11:55:28.891365Z 16 05h15m01.586480s :BS_NODE DEBUG: [16] VDiskId# [80000025:8:0:5:0] status changed to REPLICATING 2026-01-10T11:55:28.891712Z 17 05h15m01.756456s :BS_NODE DEBUG: [17] VDiskId# [8000001d:6:0:7:0] status changed to REPLICATING 2026-01-10T11:55:28.892176Z 27 05h15m01.989920s :BS_NODE DEBUG: [27] VDiskId# [80000019:5:0:2:0] status changed to REPLICATING 2026-01-10T11:55:28.892542Z 16 05h15m03.938992s :BS_NODE DEBUG: [16] VDiskId# [80000026:5:0:2:0] status changed to REPLICATING 2026-01-10T11:55:28.892902Z 17 05h15m04.561944s :BS_NODE DEBUG: [17] VDiskId# [8000002d:6:0:7:0] status changed to REPLICATING 2026-01-10T11:55:28.893366Z 16 05h15m04.703432s :BS_NODE DEBUG: [16] VDiskId# [8000002b:6:0:0:0] status changed to REPLICATING 2026-01-10T11:55:28.894598Z 17 05h15m05.148968s :BS_NODE DEBUG: [17] VDiskId# [80000035:6:0:7:0] status changed to REPLICATING 2026-01-10T11:55:28.895205Z 16 05h15m09.950480s :BS_NODE DEBUG: [16] VDiskId# [80000025:8:0:5:0] status changed to READY 2026-01-10T11:55:28.896205Z 8 05h15m09.950992s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.896262Z 8 05h15m09.950992s :BS_NODE DEBUG: [8] VDiskId# [80000025:7:0:5:0] destroyed 2026-01-10T11:55:28.896662Z 17 05h15m13.204944s :BS_NODE DEBUG: [17] VDiskId# [8000002d:6:0:7:0] status changed to READY 2026-01-10T11:55:28.897596Z 8 05h15m13.205456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.897641Z 8 05h15m13.205456s :BS_NODE DEBUG: [8] VDiskId# [8000002d:5:0:7:0] destroyed 2026-01-10T11:55:28.897762Z 17 05h15m14.264016s :BS_NODE DEBUG: [17] VDiskId# [80000039:4:0:5:0] status changed to READY 2026-01-10T11:55:28.898543Z 8 05h15m14.264528s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.898584Z 8 05h15m14.264528s :BS_NODE DEBUG: [8] VDiskId# [80000039:3:0:5:0] destroyed 2026-01-10T11:55:28.899269Z 17 05h15m22.802968s :BS_NODE DEBUG: [17] VDiskId# [80000035:6:0:7:0] status changed to READY 2026-01-10T11:55:28.900098Z 8 05h15m22.803480s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.900139Z 8 05h15m22.803480s :BS_NODE DEBUG: [8] VDiskId# [80000035:5:0:7:0] destroyed 2026-01-10T11:55:28.900274Z 16 05h15m24.893432s :BS_NODE DEBUG: [16] VDiskId# [8000002b:6:0:0:0] status changed to READY 2026-01-10T11:55:28.900976Z 8 05h15m24.893944s :BS_NODE DEBUG: [8] 
NodeServiceSetUpdate 2026-01-10T11:55:28.901016Z 8 05h15m24.893944s :BS_NODE DEBUG: [8] VDiskId# [8000002b:5:0:0:0] destroyed 2026-01-10T11:55:28.901519Z 17 05h15m25.792504s :BS_NODE DEBUG: [17] VDiskId# [80000015:6:0:7:0] status changed to READY 2026-01-10T11:55:28.902298Z 8 05h15m25.793016s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.902341Z 8 05h15m25.793016s :BS_NODE DEBUG: [8] VDiskId# [80000015:5:0:7:0] destroyed 2026-01-10T11:55:28.902675Z 27 05h15m29.016920s :BS_NODE DEBUG: [27] VDiskId# [80000019:5:0:2:0] status changed to READY 2026-01-10T11:55:28.903314Z 8 05h15m29.017432s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.903354Z 8 05h15m29.017432s :BS_NODE DEBUG: [8] VDiskId# [80000019:4:0:2:0] destroyed 2026-01-10T11:55:28.903629Z 17 05h15m29.774456s :BS_NODE DEBUG: [17] VDiskId# [8000001d:6:0:7:0] status changed to READY 2026-01-10T11:55:28.904381Z 8 05h15m29.774968s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.904423Z 8 05h15m29.774968s :BS_NODE DEBUG: [8] VDiskId# [8000001d:5:0:7:0] destroyed 2026-01-10T11:55:28.904992Z 16 05h15m30.201992s :BS_NODE DEBUG: [16] VDiskId# [80000026:5:0:2:0] status changed to READY 2026-01-10T11:55:28.905651Z 8 05h15m30.202504s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-01-10T11:55:28.905688Z 8 05h15m30.202504s :BS_NODE DEBUG: [8] VDiskId# [80000026:4:0:2:0] destroyed |80.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.6%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |80.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 8811052822292218122 2026-01-10T11:55:31.451957Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452092Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452132Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452190Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: 
Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452232Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452278Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.452330Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.453637Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453721Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453782Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453845Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453899Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453949Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.453997Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.454079Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454141Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454182Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454246Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454300Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 
'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454354Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.454404Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.456363Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456447Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456497Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456572Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456623Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456681Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.456731Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |80.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |80.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 5177496532059216627 2026-01-10T11:55:31.591631Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.591776Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.591832Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.591874Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 
2026-01-10T11:55:31.591914Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.591953Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.592010Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.592051Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.593496Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593609Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593670Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593738Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593838Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593918Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.593985Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.594045Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.594142Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594229Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594301Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594347Z 8 
00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594388Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594439Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594490Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.594539Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:31.597055Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597170Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597235Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597307Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597370Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597432Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597499Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:31.597554Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> BlobDepot::CheckIntegrity [GOOD] |80.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> CheckIntegrityBlock42::PlacementOkWithErrors >> CheckIntegrityBlock42::PlacementBlobIsLost >> CheckIntegrityBlock42::PlacementOk >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> CheckIntegrityBlock42::DataOk >> CheckIntegrityMirror3of4::PlacementOk >> CheckIntegrityBlock42::PlacementWrongDisks |80.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk >> CheckIntegrityMirror3dc::PlacementBlobIsLost |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |80.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts >> CheckIntegrityMirror3dc::PlacementOkWithErrors >> CheckIntegrityMirror3dc::PlacementOk |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::CheckIntegrity [GOOD] Test command err: Mersenne random seed 2964576227 RandomSeed# 7000822962923849767 Mersenne random seed 548066987 Mersenne random seed 2026616110 Mersenne random seed 3495001792 Mersenne random seed 3503797818 2026-01-10T11:55:01.077142Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077245Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077281Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077316Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077350Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077385Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077428Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077462Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.077660Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [631627fe3093e836] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2026-01-10T11:55:01.078385Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078487Z 2 
00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078522Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078557Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078592Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078631Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078664Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.078696Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089215Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089348Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089385Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089424Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089456Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089487Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089518Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089549Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T11:55:01.089694Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [4e4e439f47457d85] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# 
[82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 2944945459 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2026-01-10T11:55:02.192978Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193211Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193285Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193356Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193410Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193461Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193513Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2026-01-10T11:55:02.193569Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2026-01-10T11:55:02.215463Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.215709Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.215769Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.215818Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.215867Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 
2026-01-10T11:55:02.215914Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.215966Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2026-01-10T11:55:02.216016Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 2259888291 Read over the barrier, blob id# [101:1:2:2:11508156:546:0] Read over the barrier, blob id# [101:1:2:2:11508156:546:0] Read over the barrier, blob id# [101:1:2:2:11508156:546:0] Read over the barrier, blob id# [101:3:5:1:1916888:944:0] Read over the barrier, blob id# [101:3:5:1:191 ... 
ead over the barrier, blob id# [15:1:1:1:1769263:456:0] Read over the barrier, blob id# [16:2:7:0:644056:541:0] Read over the barrier, blob id# [16:2:2:1:14587906:229:0] Read over the barrier, blob id# [16:1:1:0:2730553:345:0] Read over the barrier, blob id# [16:2:7:0:644056:541:0] Read over the barrier, blob id# [16:1:2:1:7684826:181:0] Read over the barrier, blob id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [17:4:2:0:12338149:101:0] TEvRange returned collected blob with id# [16:1:2:2:11068036:849:0] TEvRange returned collected blob with id# [16:2:7:2:3385067:366:0] Read over the barrier, blob id# [16:2:2:1:14587906:229:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [15:2:8:2:3616382:385:0] TEvRange returned collected blob with id# [15:2:4:2:5517959:415:0] TEvRange returned collected blob with id# [15:2:4:2:7214452:907:0] TEvRange returned collected blob with id# [15:2:8:2:3616382:385:0] TEvRange returned collected blob with id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [16:2:7:2:3385067:366:0] Read over the barrier, blob id# [16:2:7:2:3385067:366:0] Read over the barrier, blob id# [16:1:2:2:11068036:849:0] Read over the barrier, blob id# [17:4:2:0:12338149:101:0] TEvRange returned collected blob with id# [16:1:2:2:11068036:849:0] TEvRange returned collected blob with id# [16:2:7:2:3385067:366:0] 2026-01-10T11:55:28.868275Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.869488Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.869666Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.869827Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.869987Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.870137Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.870288Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.870426Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 13 9 soft] barrier# 2:0 new key# [17 1 23 6 soft] barrier# 1:0 2026-01-10T11:55:28.900093Z 7 00h00m25.013072s 
:BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.900374Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.900516Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.900651Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.900805Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.900940Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.901067Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 2026-01-10T11:55:28.901204Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 18 0 soft] barrier# 3:1 new key# [16 1 22 0 soft] barrier# 3:0 Read over the barrier, blob id# [16:2:7:2:3385067:366:0] Read over the barrier, blob id# [15:2:8:1:5721615:883:0] Read over the barrier, blob id# [15:2:4:2:5517959:415:0] Read over the barrier, blob id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [16:1:1:0:2730553:345:0] 2026-01-10T11:55:28.967481Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.967920Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.968203Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.968512Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.968657Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 
2026-01-10T11:55:28.968982Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.969310Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 2026-01-10T11:55:28.969619Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 19 1 soft] barrier# 3:0 new key# [15 2 21 2 soft] barrier# 2:0 Read over the barrier, blob id# [15:2:4:2:5517959:415:0] Read over the barrier, blob id# [15:2:4:2:5517959:415:0] Read over the barrier, blob id# [15:2:8:2:3616382:385:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [17:5:3:0:7413518:721:0] Read over the barrier, blob id# [17:4:2:0:9454570:692:0] Read over the barrier, blob id# [15:2:7:0:8772722:375:0] Read over the barrier, blob id# [15:2:4:2:5517959:415:0] Read over the barrier, blob id# [15:2:8:1:5721615:883:0] Read over the barrier, blob id# [15:2:7:0:8772722:375:0] Read over the barrier, blob id# [15:1:1:1:1769263:456:0] TEvRange returned collected blob with id# [16:2:7:2:3385067:366:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] TEvRange returned collected blob with id# [17:4:3:2:11474073:767:0] Read over the barrier, blob id# [15:2:7:0:8772722:375:0] Read over the barrier, blob id# [15:2:7:0:8772722:375:0] Read over the barrier, blob id# [15:2:8:2:3616382:385:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [15:1:1:1:1769263:456:0] Read over the barrier, blob id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [17:5:4:2:7625595:769:0] Read over the barrier, blob id# [17:5:4:2:7625595:769:0] Read over the barrier, blob id# [17:5:4:2:7625595:769:0] Read over the barrier, blob id# [17:2:1:0:7994539:984:0] Read over the barrier, blob id# [17:5:3:0:7413518:721:0] Read over the barrier, blob id# [17:4:3:2:11474073:767:0] Read over the barrier, blob id# [17:5:6:0:5306110:345:0] Read over the barrier, blob id# [15:2:8:1:830346:221:0] Read over the barrier, blob id# [15:2:6:1:1806964:276:0] Read over the barrier, blob id# [15:2:6:1:1806964:276:0] Read over the barrier, blob id# [15:2:6:1:1806964:276:0] Read over the barrier, blob id# [15:2:4:2:5517959:415:0] Read over the barrier, blob id# [15:2:8:1:5721615:883:0] Read over the barrier, blob id# [15:2:6:1:1806964:276:0] TEvRange returned collected blob with id# [15:2:6:1:1806964:276:0] TEvRange returned collected blob with id# [15:2:8:1:830346:221:0] TEvRange returned collected blob with id# [15:2:8:1:2250698:739:0] TEvRange returned collected blob with id# [15:2:8:1:5721615:883:0] TEvRange returned collected blob with id# [15:2:4:2:5517959:415:0] TEvRange returned collected blob with id# [15:2:4:2:7214452:907:0] Read over the barrier, blob id# [15:2:6:1:1806964:276:0] Read over the barrier, blob id# [15:1:3:2:13681207:915:0] Read over the barrier, blob id# [15:2:8:2:13327670:634:0] Read over the barrier, blob id# [17:4:3:2:11474073:767:0] Read over the barrier, blob id# [17:4:3:2:11474073:767:0] Read over the barrier, blob id# [15:2:7:0:8772722:375:0] Read over the barrier, blob id# [15:1:3:2:13681207:915:0] Read over the barrier, blob id# 
[15:2:8:2:13327670:634:0] Read over the barrier, blob id# [17:4:3:2:11474073:767:0] Mersenne random seed 2673911274 ErrorReason DataInfo Disks: 0: [82000000:1:0:2:0] 1: [82000000:1:0:3:0] 2: [82000000:1:0:4:0] 3: [82000000:1:0:5:0] 4: [82000000:1:0:6:0] 5: [82000000:1:0:7:0] 6: [82000000:1:0:0:0] 7: [82000000:1:0:1:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ErrorReason DataInfo [72075186224037888:1:1:2:1:100:0] Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> Donor::CheckOnlineReadRequestToDonor >> Donor::SkipBadDonor >> CheckIntegrityBlock42::PlacementOkWithErrors [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks >> Donor::SlayAfterWiping >> Donor::ContinueWithFaultyDonor |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> CheckIntegrityMirror3of4::PlacementOk [GOOD] >> CheckIntegrityMirror3of4::PlacementMissingParts >> CheckIntegrityBlock42::PlacementWrongDisks [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy >> CheckIntegrityBlock42::DataOk [GOOD] >> CheckIntegrityBlock42::DataOkAdditionalEqualParts >> CheckIntegrityBlock42::PlacementOk [GOOD] >> CheckIntegrityBlock42::PlacementOkHandoff >> CheckIntegrityBlock42::PlacementBlobIsLost [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.7%| [TM] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |80.7%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> 
VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.7%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage >> CheckIntegrityMirror3dc::PlacementOkWithErrors [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks >> BSCStopPDisk::PDiskStop |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> CheckIntegrityMirror3dc::PlacementBlobIsLost [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated >> CheckIntegrityMirror3dc::PlacementOk [GOOD] >> CheckIntegrityMirror3dc::PlacementOkHandoff >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 11506148822933036996 SEND TEvPut with key [1:1:1:0:0:100:0] 2026-01-10T11:55:31.998208Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-10T11:55:31.998554Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-01-10T11:55:32.046945Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] |80.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 8376305607630189984 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2026-01-10T11:55:32.857158Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-10T11:55:32.857388Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2026-01-10T11:55:32.857536Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-10T11:55:32.857673Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2026-01-10T11:55:32.857810Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-10T11:55:32.857934Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-10T11:55:32.858057Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 23745032308326380 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-10T11:55:32.469011Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-10T11:55:32.469272Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2026-01-10T11:55:32.469469Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] 
TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-10T11:55:32.469616Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2026-01-10T11:55:32.469783Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-10T11:55:32.469977Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-10T11:55:32.470093Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> CheckIntegrityMirror3of4::PlacementMissingParts [GOOD] >> CheckIntegrityMirror3of4::PlacementDisintegrated |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.8%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityBlock42::PlacementStatusUnknown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 5603092918323018905 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-10T11:55:32.617904Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 8078738744128080070 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2026-01-10T11:55:32.930192Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6348:836] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> 
VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 16437111030085236021 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2026-01-10T11:55:32.496983Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-10T11:55:32.497474Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-01-10T11:55:32.619032Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 12311866224249230041 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2026-01-10T11:55:32.709620Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> CheckIntegrityBlock42::PlacementAllOnHandoff [GOOD] >> CheckIntegrityBlock42::PlacementDisintegrated |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> CheckIntegrityBlock42::DataOkAdditionalEqualParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken >> CheckIntegrityBlock42::PlacementOkHandoff [GOOD] >> CheckIntegrityBlock42::PlacementMissingParts >> BSCStopPDisk::PDiskStop [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated [GOOD] >> CheckIntegrityMirror3dc::DataOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 6462514333754300289 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2026-01-10T11:55:14.924088Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-01-10T11:55:14.924209Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-01-10T11:55:14.924659Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-01-10T11:55:14.924679Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-01-10T11:55:14.924799Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-01-10T11:55:14.924814Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-01-10T11:55:14.924920Z 4 00h00m00.000000s :BS_NODE DEBUG: 
[4] Bootstrap 2026-01-10T11:55:14.924934Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-01-10T11:55:14.925051Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-01-10T11:55:14.925064Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-01-10T11:55:14.925085Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-01-10T11:55:14.925102Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-01-10T11:55:14.925123Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-01-10T11:55:14.925236Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-01-10T11:55:14.925344Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-01-10T11:55:14.925549Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-01-10T11:55:14.925670Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-01-10T11:55:14.925686Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-01-10T11:55:14.925708Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-01-10T11:55:14.925721Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-01-10T11:55:14.926534Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-01-10T11:55:14.926555Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-01-10T11:55:14.926578Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-01-10T11:55:14.926591Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-01-10T11:55:14.926709Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-01-10T11:55:14.926722Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-01-10T11:55:14.927009Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-01-10T11:55:14.927025Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-01-10T11:55:14.927140Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-01-10T11:55:14.927260Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-01-10T11:55:14.927286Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-01-10T11:55:14.927415Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-01-10T11:55:14.927617Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-01-10T11:55:14.927632Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-01-10T11:55:14.927743Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-01-10T11:55:14.927757Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-01-10T11:55:14.927777Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-01-10T11:55:14.927790Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-01-10T11:55:14.927980Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-01-10T11:55:14.927993Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-01-10T11:55:14.928120Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-01-10T11:55:14.928134Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-01-10T11:55:14.928155Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-01-10T11:55:14.928267Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-01-10T11:55:14.928382Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-01-10T11:55:14.928587Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-01-10T11:55:14.928613Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-01-10T11:55:14.928626Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-01-10T11:55:14.928646Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-01-10T11:55:14.928751Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-01-10T11:55:14.928773Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-01-10T11:55:14.928785Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-01-10T11:55:14.928818Z 27 00h00m00.000000s 
:BS_NODE DEBUG: [27] Bootstrap 2026-01-10T11:55:14.928834Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-01-10T11:55:14.928861Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-01-10T11:55:14.928875Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-01-10T11:55:14.928895Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-01-10T11:55:14.928909Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-01-10T11:55:14.928941Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-01-10T11:55:14.928955Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-01-10T11:55:14.929069Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-01-10T11:55:14.929181Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-01-10T11:55:14.929206Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-01-10T11:55:14.929220Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-01-10T11:55:14.929310Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-01-10T11:55:14.929324Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-01-10T11:55:14.929513Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-01-10T11:55:14.929533Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-01-10T11:55:14.929645Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-01-10T11:55:14.929754Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-01-10T11:55:14.929867Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-01-10T11:55:14.929879Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-01-10T11:55:15.026687Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-01-10T11:55:15.028546Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-01-10T11:55:15.028583Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-01-10T11:55:15.028608Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-01-10T11:55:15.028828Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-01-10T11:55:15.028851Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-01-10T11:55:15.028873Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-01-10T11:55:15.028895Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-01-10T11:55:15.029012Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-01-10T11:55:15.029036Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-01-10T11:55:15.029061Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-01-10T11:55:15.029083Z 12 
00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-01-10T11:55:15.029195Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-01-10T11:55:15.029423Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-01-10T11:55:15.029557Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-01-10T11:55:15.029579Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-01-10T11:55:15.029697Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-01-10T11:55:15.029822Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-01-10T11:55:15.029850Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-01-10T11:55:15.029873Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-01-10T11:55:15.029896Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-01-10T11:55:15.030264Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-01-10T11:55:15.030294Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-01-10T11:55:15.030319Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-01-10T11:55:15.030342Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-01-10T11:55:15.030366Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-01-10T11:55:15.030479Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-01-10T11:55:15.030505Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-01-10T11:55:15.030715Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-01-10T11:55:15.030857Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-01-10T11:55:15.031064Z 31 00h00m00.000000s 
:BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-01-10T11:55:15.031089Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-01-10T11:55:15.031122Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-01-10T11:55:15.031146Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-01-10T11:55:15.031251Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... FHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483774 Items# [8000007e:3:1:2:0]: 23:1002:1008 -> 13:1002:1009 ConfigTxSeqNo# 521 2026-01-10T11:55:38.167945Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483774 Success# true 2026-01-10T11:55:38.168131Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-10T11:55:38.168203Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] VDiskId# [8000007e:3:1:1:0] -> [8000007e:4:1:1:0] 2026-01-10T11:55:38.168314Z 36 05h45m00.123040s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-01-10T11:55:38.168716Z 36 05h45m00.123040s :BS_NODE DEBUG: [36] VDiskId# [8000007e:3:2:1:0] -> [8000007e:4:2:1:0] 2026-01-10T11:55:38.168817Z 3 05h45m00.123040s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-01-10T11:55:38.168869Z 3 05h45m00.123040s :BS_NODE DEBUG: [3] VDiskId# [8000007e:3:0:2:0] -> [8000007e:4:0:2:0] 2026-01-10T11:55:38.168969Z 21 05h45m00.123040s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-01-10T11:55:38.169022Z 21 05h45m00.123040s :BS_NODE DEBUG: [21] VDiskId# [8000007e:3:1:0:0] -> [8000007e:4:1:0:0] 2026-01-10T11:55:38.169091Z 23 05h45m00.123040s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.169183Z 9 05h45m00.123040s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-01-10T11:55:38.169239Z 9 05h45m00.123040s :BS_NODE DEBUG: [9] VDiskId# [8000007e:3:0:0:0] -> [8000007e:4:0:0:0] 2026-01-10T11:55:38.169333Z 27 05h45m00.123040s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2026-01-10T11:55:38.169389Z 27 05h45m00.123040s :BS_NODE DEBUG: [27] VDiskId# [8000007e:3:2:2:0] -> [8000007e:4:2:2:0] 2026-01-10T11:55:38.169484Z 12 05h45m00.123040s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-01-10T11:55:38.169540Z 12 05h45m00.123040s :BS_NODE DEBUG: [12] VDiskId# [8000007e:3:0:1:0] -> [8000007e:4:0:1:0] 2026-01-10T11:55:38.169646Z 13 05h45m00.123040s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-01-10T11:55:38.169695Z 13 05h45m00.123040s :BS_NODE DEBUG: [13] VDiskId# [8000007e:4:1:2:0] PDiskId# 1002 VSlotId# 1009 created 2026-01-10T11:55:38.169788Z 13 05h45m00.123040s :BS_NODE DEBUG: [13] VDiskId# [8000007e:4:1:2:0] status changed to INIT_PENDING 2026-01-10T11:55:38.169907Z 33 05h45m00.123040s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-01-10T11:55:38.169966Z 33 05h45m00.123040s :BS_NODE DEBUG: [33] VDiskId# [8000007e:3:2:0:0] -> [8000007e:4:2:0:0] 2026-01-10T11:55:38.170332Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483689 2026-01-10T11:55:38.171376Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true 
Replicated# true 2026-01-10T11:55:38.171434Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:0:0] DiskIsOk# true 2026-01-10T11:55:38.171482Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171517Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:1:0] DiskIsOk# true 2026-01-10T11:55:38.171552Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171586Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:2:0] DiskIsOk# true 2026-01-10T11:55:38.171620Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171653Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:1:1:0] DiskIsOk# true 2026-01-10T11:55:38.171685Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171715Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:1:2:0] DiskIsOk# true 2026-01-10T11:55:38.171749Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171781Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:0:0] DiskIsOk# true 2026-01-10T11:55:38.171815Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171847Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:1:0] DiskIsOk# true 2026-01-10T11:55:38.171880Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2026-01-10T11:55:38.171910Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:2:0] DiskIsOk# true 2026-01-10T11:55:38.178571Z 1 05h45m00.123552s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483689 Items# [80000029:1:1:0:0]: 23:1002:1002 -> 22:1002:1016 ConfigTxSeqNo# 522 2026-01-10T11:55:38.178618Z 1 05h45m00.123552s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483689 Success# true 2026-01-10T11:55:38.178772Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-01-10T11:55:38.178841Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] VDiskId# [80000029:1:1:2:0] -> [80000029:2:1:2:0] 2026-01-10T11:55:38.178939Z 35 05h45m00.123552s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2026-01-10T11:55:38.178984Z 35 05h45m00.123552s :BS_NODE DEBUG: [35] VDiskId# 
[80000029:1:2:0:0] -> [80000029:2:2:0:0] 2026-01-10T11:55:38.179067Z 2 05h45m00.123552s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-01-10T11:55:38.179111Z 2 05h45m00.123552s :BS_NODE DEBUG: [2] VDiskId# [80000029:1:0:1:0] -> [80000029:2:0:1:0] 2026-01-10T11:55:38.179193Z 5 05h45m00.123552s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-01-10T11:55:38.179239Z 5 05h45m00.123552s :BS_NODE DEBUG: [5] VDiskId# [80000029:1:0:2:0] -> [80000029:2:0:2:0] 2026-01-10T11:55:38.179325Z 22 05h45m00.123552s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2026-01-10T11:55:38.179366Z 22 05h45m00.123552s :BS_NODE DEBUG: [22] VDiskId# [80000029:2:1:0:0] PDiskId# 1002 VSlotId# 1016 created 2026-01-10T11:55:38.179436Z 22 05h45m00.123552s :BS_NODE DEBUG: [22] VDiskId# [80000029:2:1:0:0] status changed to INIT_PENDING 2026-01-10T11:55:38.179507Z 23 05h45m00.123552s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.179583Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2026-01-10T11:55:38.179630Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] VDiskId# [80000029:1:2:1:0] -> [80000029:2:2:1:0] 2026-01-10T11:55:38.179712Z 11 05h45m00.123552s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2026-01-10T11:55:38.179754Z 11 05h45m00.123552s :BS_NODE DEBUG: [11] VDiskId# [80000029:1:0:0:0] -> [80000029:2:0:0:0] 2026-01-10T11:55:38.179832Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2026-01-10T11:55:38.179876Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] VDiskId# [80000029:1:2:2:0] -> [80000029:2:2:2:0] 2026-01-10T11:55:38.179957Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-01-10T11:55:38.180000Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] VDiskId# [80000029:1:1:1:0] -> [80000029:2:1:1:0] 2026-01-10T11:55:38.181089Z 22 05h45m01.439968s :BS_NODE DEBUG: [22] VDiskId# [80000009:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.181747Z 22 05h45m01.641504s :BS_NODE DEBUG: [22] VDiskId# [80000059:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.182429Z 13 05h45m01.681040s :BS_NODE DEBUG: [13] VDiskId# [8000007e:4:1:2:0] status changed to REPLICATING 2026-01-10T11:55:38.182847Z 22 05h45m01.855992s :BS_NODE DEBUG: [22] VDiskId# [80000069:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.183571Z 22 05h45m01.964456s :BS_NODE DEBUG: [22] VDiskId# [80000019:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.184371Z 22 05h45m02.145016s :BS_NODE DEBUG: [22] VDiskId# [80000049:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.185140Z 22 05h45m04.316552s :BS_NODE DEBUG: [22] VDiskId# [80000029:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.185878Z 22 05h45m04.452480s :BS_NODE DEBUG: [22] VDiskId# [80000079:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.186713Z 22 05h45m04.533528s :BS_NODE DEBUG: [22] VDiskId# [80000039:2:1:0:0] status changed to REPLICATING 2026-01-10T11:55:38.188909Z 22 05h45m13.296552s :BS_NODE DEBUG: [22] VDiskId# [80000029:2:1:0:0] status changed to READY 2026-01-10T11:55:38.190399Z 23 05h45m13.297064s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.190468Z 23 05h45m13.297064s :BS_NODE DEBUG: [23] VDiskId# [80000029:1:1:0:0] destroyed 2026-01-10T11:55:38.191763Z 22 05h45m25.240968s :BS_NODE DEBUG: [22] VDiskId# [80000009:2:1:0:0] status changed to READY 2026-01-10T11:55:38.193212Z 23 05h45m25.241480s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.193278Z 23 05h45m25.241480s :BS_NODE DEBUG: [23] VDiskId# [80000009:1:1:0:0] destroyed 2026-01-10T11:55:38.193446Z 22 05h45m25.838456s 
:BS_NODE DEBUG: [22] VDiskId# [80000019:2:1:0:0] status changed to READY 2026-01-10T11:55:38.194726Z 23 05h45m25.838968s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.194779Z 23 05h45m25.838968s :BS_NODE DEBUG: [23] VDiskId# [80000019:1:1:0:0] destroyed 2026-01-10T11:55:38.195186Z 22 05h45m29.995528s :BS_NODE DEBUG: [22] VDiskId# [80000039:2:1:0:0] status changed to READY 2026-01-10T11:55:38.196244Z 23 05h45m29.996040s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.196292Z 23 05h45m29.996040s :BS_NODE DEBUG: [23] VDiskId# [80000039:1:1:0:0] destroyed 2026-01-10T11:55:38.197135Z 22 05h45m30.232504s :BS_NODE DEBUG: [22] VDiskId# [80000059:2:1:0:0] status changed to READY 2026-01-10T11:55:38.198177Z 23 05h45m30.233016s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.198223Z 23 05h45m30.233016s :BS_NODE DEBUG: [23] VDiskId# [80000059:1:1:0:0] destroyed 2026-01-10T11:55:38.198342Z 22 05h45m30.801480s :BS_NODE DEBUG: [22] VDiskId# [80000079:2:1:0:0] status changed to READY 2026-01-10T11:55:38.199464Z 23 05h45m30.801992s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.199519Z 23 05h45m30.801992s :BS_NODE DEBUG: [23] VDiskId# [80000079:1:1:0:0] destroyed 2026-01-10T11:55:38.199663Z 13 05h45m30.981040s :BS_NODE DEBUG: [13] VDiskId# [8000007e:4:1:2:0] status changed to READY 2026-01-10T11:55:38.200496Z 23 05h45m30.981552s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.200550Z 23 05h45m30.981552s :BS_NODE DEBUG: [23] VDiskId# [8000007e:3:1:2:0] destroyed 2026-01-10T11:55:38.201084Z 22 05h45m36.106992s :BS_NODE DEBUG: [22] VDiskId# [80000069:2:1:0:0] status changed to READY 2026-01-10T11:55:38.202334Z 23 05h45m36.107504s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.202388Z 23 05h45m36.107504s :BS_NODE DEBUG: [23] VDiskId# [80000069:1:1:0:0] destroyed 2026-01-10T11:55:38.202553Z 22 05h45m36.304016s :BS_NODE DEBUG: [22] VDiskId# [80000049:2:1:0:0] status changed to READY 2026-01-10T11:55:38.203724Z 23 05h45m36.304528s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2026-01-10T11:55:38.203779Z 23 05h45m36.304528s :BS_NODE DEBUG: [23] VDiskId# [80000049:1:1:0:0] destroyed |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> CheckIntegrityMirror3dc::PlacementOkHandoff [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken [GOOD] >> CheckIntegrityBlock42::DataOkErasureFiveParts |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.8%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom >> CheckIntegrityBlock42::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] Test command err: RandomSeed# 4617819795881314712 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:7:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** Group is disintegrated or has network problems |80.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 6815899018730577929 2026-01-10T11:55:39.942890Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:39.945098Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2387431003661459058] 2026-01-10T11:55:39.966737Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2026-01-10T11:55:39.966974Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] |80.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] Test command err: RandomSeed# 10297185632329312194 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> Donor::MultipleEvicts [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] |80.9%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] Test command err: RandomSeed# 10516652201680050453 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** >> CheckIntegrityMirror3dc::DataOk [GOOD] |80.9%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] Test command err: RandomSeed# 14624956423785581613 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> Donor::SlayAfterWiping [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 9975770185758886134 0 donors: 2026-01-10T11:55:40.641313Z 15 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:40.643632Z 15 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:40.663118Z 15 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 14:1000 2026-01-10T11:55:40.910638Z 14 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:40.913622Z 14 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:40.931970Z 14 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2026-01-10T11:55:41.013170Z 15 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.016067Z 15 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.029071Z 15 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 14:1000 2026-01-10T11:55:41.129211Z 14 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.131934Z 14 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.151475Z 14 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2026-01-10T11:55:41.434605Z 15 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.436877Z 15 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.469965Z 15 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 14:1000 2026-01-10T11:55:41.537044Z 14 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.539206Z 14 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 
2026-01-10T11:55:41.549845Z 14 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2026-01-10T11:55:41.650515Z 15 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.653040Z 15 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.663624Z 15 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 14:1000 2026-01-10T11:55:41.734343Z 14 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.736883Z 14 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.747206Z 14 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2026-01-10T11:55:41.851438Z 15 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.853740Z 15 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 987113245138469422] 2026-01-10T11:55:41.864288Z 15 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:1:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 14:1000 |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] Test command err: RandomSeed# 13653900138272545119 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** >> Donor::ContinueWithFaultyDonor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataOk [GOOD] Test command err: RandomSeed# 2687489434800015639 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Group is 
disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ] |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] Test command err: RandomSeed# 10914314377414584445 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 ], ver1 disks [ 2 ] ERROR: There are unequal parts *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ], ver1 disks [ 3 4 5 ] ERROR: There are unequal parts >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] >> Donor::SkipBadDonor [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |80.9%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.9%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 7841214192272321458 2026-01-10T11:55:41.534326Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.536120Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1921903577727307010] 2026-01-10T11:55:41.673922Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 18054097335811085141 2026-01-10T11:55:41.148411Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.150250Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15779774936556308840] 2026-01-10T11:55:41.173650Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] Test command err: RandomSeed# 6469156813078272531 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 
] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 0 ] part 2: ver0 disks [ 7 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: 
ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 5676021624137372412 2026-01-10T11:55:41.428188Z 1 00h01m14.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.431166Z 1 00h01m14.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 49866688975299748] 2026-01-10T11:55:41.634340Z 1 00h01m14.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] Test command err: RandomSeed# 15365991559787885504 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: 
[82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: part 2: part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 1221463675975168800 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 
1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED 
WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 
3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 
6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |81.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |81.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |81.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |81.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 14383705726565277100 |81.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> BSCReadOnlyPDisk::ReadOnlyOneByOne |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |81.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> BSCReadOnlyPDisk::ReadOnlySlay >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive >> BSCReadOnlyPDisk::ReadOnlyNotAllowed |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |81.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |81.1%| [LD] {RESULT} 
$(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 16578652365821872781 2026-01-10T11:55:46.607973Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608417Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608450Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608678Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608708Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608736Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.608936Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.609153Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.612366Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.612806Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.612856Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.612904Z 4 00h00m30.010512s :BS_SKELETON ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.612944Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.612997Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.613056Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.613108Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.613175Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613219Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613263Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613691Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613720Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613746Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.613963Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.614168Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:46.621365Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.621977Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622018Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 
1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622055Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622093Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622488Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622701Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:46.622742Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.587280Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2026-01-10T11:55:47.587346Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2026-01-10T11:55:47.587390Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2026-01-10T11:55:47.587427Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2026-01-10T11:55:47.587461Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2026-01-10T11:55:47.587500Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2026-01-10T11:55:47.587537Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 3859198313784827452 2026-01-10T11:55:47.391823Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: 
Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.392442Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.392965Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.393148Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.393177Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.393558Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.393599Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.396726Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.396966Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397011Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397223Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397426Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397468Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397678Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.397913Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398255Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable 
error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398452Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398653Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398685Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398848Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.398891Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-01-10T11:55:47.403119Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403209Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403274Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403346Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403403Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403466Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-01-10T11:55:47.403516Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange >> TStateStorageRingGroupState::TestStateStorageDoubleReply >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent >> TStateStorageRingGroupState::TestProxyConfigMismatch >> TStateStorageRingGroupState::TestBoardConfigMismatch |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a >> ClusterBalancing::ClusterBalancingEvenDistribution >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] |81.2%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnce >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] >> TStateStorageRingGroupState::TestStateStorageUpdateSig >> BridgeGet::PartRestorationAcrossBridge >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] >> BridgeGet::PartRestorationAcrossBridgeOnDiscover |81.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] Test command err: RandomSeed# 15525097820001543562 2026-01-10T11:55:48.827672Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:242:20] SessionId# [7:121:2] Cookie# 8411154786029964126 2026-01-10T11:55:48.834966Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 8411154786029964126 SessionId# [7:121:2] Binding# {4.4/10404555274338420462@[7:77:4]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 4 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.836219Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [4:70:4] Inserted# false Subscription# {SessionId# [4:70:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.836241Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.6:19001/6 2026-01-10T11:55:48.836333Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.7:19001/7 2026-01-10T11:55:48.836352Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.9:19001/9 2026-01-10T11:55:48.836367Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.4:19001/4 2026-01-10T11:55:48.836404Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.1:19001/1 2026-01-10T11:55:48.836425Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.2:19001/2 2026-01-10T11:55:48.836440Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.3:19001/3 2026-01-10T11:55:48.836512Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.836610Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 3 Binding# {3.0/3512889295231523159@[0:0:0]} SessionId# [0:0:0] 2026-01-10T11:55:48.836766Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:277:20] SessionId# [2:120:6] Cookie# 8411154786029964126 2026-01-10T11:55:48.836787Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [2:120:6] Inserted# false Subscription# {SessionId# 
[2:120:6] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-10T11:55:48.836860Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 8411154786029964126 SessionId# [2:120:6] Binding# {7.0/8411154786029964126@[2:120:6]} Record# {RootNodeId: 4 } 2026-01-10T11:55:48.836938Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [7:277:20] SessionId# [4:76:6] Cookie# 10404555274338420462 2026-01-10T11:55:48.836958Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [4:76:6] Inserted# false Subscription# {SessionId# [4:76:6] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.837119Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 7 Cookie# 10404555274338420462 SessionId# [4:76:6] Binding# {3.0/3512889295231523159@[0:0:0]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 7579678457034418669 2026-01-10T11:55:48.960025Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [3:97:6] SessionId# [0:0:0] Cookie# 2 2026-01-10T11:55:48.960108Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 7 SessionId# [3:97:6] Cookie# 2 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:48.960220Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [7:98:3] Cookie# 12721016807111162211 2026-01-10T11:55:48.960268Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.967413Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 12721016807111162211 SessionId# [7:98:3] Binding# {3.0/12721016807111162211@[7:98:3]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.967496Z 7 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007272s 2026-01-10T11:55:48.967557Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:249:20] SessionId# [2:108:2] Cookie# 13685171250598720449 2026-01-10T11:55:48.967610Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.967783Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 13685171250598720449 SessionId# [2:108:2] Binding# {1.1/1184864886006468501@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.968290Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [1:136:3] Inserted# false Subscription# {SessionId# [1:136:3] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-10T11:55:48.968322Z 1 00h00m00.000000s :BS_NODE 
DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 4 NodeId# 127.0.0.7:19001/7 2026-01-10T11:55:48.968436Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-10T11:55:48.968497Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 7 Binding# {7.0/14782111158397371184@[0:0:0]} SessionId# [0:0:0] 2026-01-10T11:55:48.969571Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:145:6] SessionId# [0:0:0] Cookie# 7 2026-01-10T11:55:48.969612Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 7 SessionId# [1:145:6] Cookie# 7 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:48.969657Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {7.0/14782111158397371184@[0:0:0]} 2026-01-10T11:55:48.969733Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 1184864886006468501 2026-01-10T11:55:48.969770Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-10T11:55:48.969891Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 1184864886006468501 SessionId# [1:130:1] Binding# {7.0/14782111158397371184@[1:145:6]} Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 17285495446432008110 2026-01-10T11:55:48.650923Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:130:1] SessionId# [0:0:0] Cookie# 9 2026-01-10T11:55:48.650988Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 2 SessionId# [1:130:1] Cookie# 9 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:48.651056Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {2.0/7567893777806305960@[0:0:0]} 2026-01-10T11:55:48.651177Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 7567893777806305959 2026-01-10T11:55:48.651218Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.658193Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 7567893777806305959 SessionId# [6:143:1] Binding# {2.2/9190809792701080412@[6:118:2]} Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.658276Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# 
[6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.658338Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2026-01-10T11:55:48.658382Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-01-10T11:55:48.658415Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-01-10T11:55:48.658464Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-01-10T11:55:48.658546Z 6 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007369s 2026-01-10T11:55:48.658612Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [5:92:3] Cookie# 9719035953285663015 2026-01-10T11:55:48.658655Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [5:92:3] Inserted# false Subscription# {SessionId# [5:92:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.658742Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 9719035953285663015 SessionId# [5:92:3] Binding# {3.1/9719035953285663015@[5:92:3]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.658799Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [1:142:5] Cookie# 7567893777806305959 2026-01-10T11:55:48.658844Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 6 Cookie# 7567893777806305959 SessionId# [1:142:5] Binding# {2.0/7567893777806305960@[1:130:1]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.658877Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 7567893777806305959 2026-01-10T11:55:48.658915Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.658955Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 7567893777806305959 SessionId# [6:143:1] Binding# {2.2/9190809792701080412@[6:118:2]} 2026-01-10T11:55:48.658996Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-01-10T11:55:48.659031Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2026-01-10T11:55:48.659086Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-01-10T11:55:48.659132Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-01-10T11:55:48.659159Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-01-10T11:55:48.659187Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 
2026-01-10T11:55:48.659240Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} 2026-01-10T11:55:48.659302Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 2678746374798245979 2026-01-10T11:55:48.659335Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-10T11:55:48.659391Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 2678746374798245979 SessionId# [7:146:1] Binding# {1.2/2678746374798245979@[7:146:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.659428Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 2283320212592307428 2026-01-10T11:55:48.659468Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.659522Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 2283320212592307428 SessionId# [8:149:1] Binding# {1.2/2283320212592307428@[8:149:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.659566Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 9932070300528896068 2026-01-10T11:55:48.659606Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.659654Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 9932070300528896068 SessionId# [9:152:1] Binding# {1.2/9932070300528896068@[9:152:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.659690Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [4:137:1] Cookie# 7035689051738225100 2026-01-10T11:55:48.659720Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-10T11:55:48.659760Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 7035689051738225100 SessionId# [4:137:1] Binding# {1.2/7035689051738225100@[4:137:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.660105Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 7567893777806305960 2026-01-10T11:55:48.660313Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 7567893777806305960 SessionId# [2:131:1] Binding# Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } 
DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.661743Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [2:117:5] Inserted# false Subscription# {SessionId# [2:117:5] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-10T11:55:48.661787Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.2:1900 ... 1:55:48.714255Z 1 00h00m00.194281s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714285Z 1 00h00m00.194281s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714508Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.714557Z 1 00h00m00.430256s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.714592Z 1 00h00m00.430256s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.714633Z 1 00h00m00.430256s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.714673Z 1 00h00m00.430256s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.714698Z 1 00h00m00.430256s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.714728Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714782Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714813Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714845Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.714872Z 1 00h00m00.430256s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.715136Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.715205Z 1 00h00m00.906925s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.715240Z 1 00h00m00.906925s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.715267Z 1 00h00m00.906925s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.715293Z 1 00h00m00.906925s 
:STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.715321Z 1 00h00m00.906925s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.715354Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.715388Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.715418Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.715458Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.715492Z 1 00h00m00.906925s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.716548Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.716615Z 1 00h00m01.955596s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.716659Z 1 00h00m01.955596s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.716688Z 1 00h00m01.955596s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.716715Z 1 00h00m01.955596s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.716747Z 1 00h00m01.955596s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.716779Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.716820Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.716855Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.716883Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.716916Z 1 00h00m01.955596s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.719083Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.719143Z 1 00h00m04.241698s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936131 Cookie: 0} 2026-01-10T11:55:48.719179Z 1 00h00m04.241698s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.719207Z 1 00h00m04.241698s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.719232Z 1 00h00m04.241698s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.719258Z 1 00h00m04.241698s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.719300Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.719339Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.719382Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.719426Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.719455Z 1 00h00m04.241698s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.722236Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.722299Z 1 00h00m09.133956s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.722350Z 1 00h00m09.133956s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.722375Z 1 00h00m09.133956s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.722391Z 1 00h00m09.133956s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.722412Z 1 00h00m09.133956s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.722433Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.722460Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.722483Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.722501Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.722521Z 1 00h00m09.133956s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.723558Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.723629Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:48.723660Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:48.723679Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:48.723696Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:48.723720Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-10T11:55:48.723760Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.723801Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.723824Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 1787426773533554780 2026-01-10T11:55:48.691599Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 6482728283427088358 2026-01-10T11:55:48.691669Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.698995Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 6482728283427088358 SessionId# [4:89:3] Binding# {3.1/6482728283427088358@[4:89:3]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.699091Z 4 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007516s 2026-01-10T11:55:48.699160Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [5:92:3] Cookie# 6583312504212282414 2026-01-10T11:55:48.699206Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [5:92:3] Inserted# false Subscription# {SessionId# [5:92:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.699269Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 6583312504212282414 SessionId# [5:92:3] Binding# 
{3.1/6583312504212282414@[5:92:3]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.699310Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 6583312504212282414 2026-01-10T11:55:48.699354Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-10T11:55:48.699557Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 5 Cookie# 6583312504212282414 SessionId# [3:91:4] Binding# {1.2/8516222649268866340@[3:134:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] Test command err: RandomSeed# 16594771008234829697 2026-01-10T11:55:48.554160Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 10320974604939556404 2026-01-10T11:55:48.560296Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 10320974604939556404 SessionId# [8:149:1] Binding# {4.1/13507243337565903087@[8:80:4]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 clusterStateGeneration=10 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-01-10T11:55:48.628178Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 10 ClusterStateGuid: 0 2026-01-10T11:55:48.637408Z 1 00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.637505Z 1 00h00m21.828482s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.637571Z 1 00h00m21.828482s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.637600Z 1 00h00m21.828482s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.637648Z 1 00h00m21.828482s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.637689Z 1 00h00m21.828482s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.637726Z 1 00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.637768Z 1 00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.637802Z 1 00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.637830Z 1 00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.637858Z 1 
00h00m21.828482s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.654082Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.654151Z 1 00h00m48.464564s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.654179Z 1 00h00m48.464564s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.654217Z 1 00h00m48.464564s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.654250Z 1 00h00m48.464564s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.654267Z 1 00h00m48.464564s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.654292Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.654317Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.654350Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.654367Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.654384Z 1 00h00m48.464564s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 4887845776050393042 2026-01-10T11:55:48.739219Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 15595133610601217363 2026-01-10T11:55:48.739298Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.746313Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 15595133610601217363 SessionId# [1:130:1] Binding# {2.9/15595133610601217363@[1:130:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.746418Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {2.9/15595133610601217363@[1:130:1]} Reason# binding cycle 2026-01-10T11:55:48.746481Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {2.9/15595133610601217363@[1:130:1]} 2026-01-10T11:55:48.746554Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 2 Subscription# {SessionId# 
[1:130:1] SubscriptionCookie# 0} 2026-01-10T11:55:48.746617Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.746681Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 2 Binding# {2.0/15595133610601217364@[0:0:0]} SessionId# [0:0:0] 2026-01-10T11:55:48.746727Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007536s 2026-01-10T11:55:48.746783Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 15595133610601217363 2026-01-10T11:55:48.746823Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.746982Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 15595133610601217363 SessionId# [2:131:1] Binding# {9.1/6638594332720953225@[2:126:8]} Record# {BoundNodes { NodeId { Host: "127.0.0.9" Port: 19001 NodeId: 9 } Meta { Fingerprint: "\3403\207\365\032> Record# {RootNodeId: 9 } 2026-01-10T11:55:48.748519Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 5941796787392382709 2026-01-10T11:55:48.748550Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.748593Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 5941796787392382709 SessionId# [3:134:1] Binding# {1.2/5941796787392382709@[3:134:1]} Record# {RootNodeId: 9 } 2026-01-10T11:55:48.748688Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 5941796787392382709 2026-01-10T11:55:48.748720Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.748868Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 5941796787392382709 SessionId# [1:133:2] Binding# {2.0/15595133610601217364@[0:0:0]} Record# {BoundNodes { NodeId { Host: "127.0.0.1" Port: 19001 NodeId: 1 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.749634Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.749675Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-01-10T11:55:48.749906Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 15595133610601217363 
2026-01-10T11:55:48.749940Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.749970Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 15595133610601217363 SessionId# [2:131:1] Binding# 2026-01-10T11:55:48.749996Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind reques ... 8.821828Z 1 00h00m00.778220s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.821843Z 1 00h00m00.778220s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.821858Z 1 00h00m00.778220s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.821876Z 1 00h00m00.778220s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.821904Z 1 00h00m00.778220s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.821927Z 1 00h00m00.778220s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.821950Z 1 00h00m00.778220s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.821966Z 1 00h00m00.778220s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.822662Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.822708Z 1 00h00m01.624877s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.822734Z 1 00h00m01.624877s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.822762Z 1 00h00m01.624877s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.822782Z 1 00h00m01.624877s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.822801Z 1 00h00m01.624877s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.822831Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.822859Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.822874Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 
ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.822890Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.822906Z 1 00h00m01.624877s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.823901Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.823939Z 1 00h00m03.504455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.823963Z 1 00h00m03.504455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.823996Z 1 00h00m03.504455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.824012Z 1 00h00m03.504455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.824027Z 1 00h00m03.504455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.824054Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.824099Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.824116Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.824131Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.824146Z 1 00h00m03.504455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.826041Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.826091Z 1 00h00m07.526751s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.826120Z 1 00h00m07.526751s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.826141Z 1 00h00m07.526751s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.826159Z 1 00h00m07.526751s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.826184Z 1 00h00m07.526751s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.826205Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
2026-01-10T11:55:48.826234Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.826257Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.826273Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.826288Z 1 00h00m07.526751s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.828207Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 2026-01-10T11:55:48.828282Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 1 2026-01-10T11:55:48.831490Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.831555Z 1 00h00m16.456248s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.831582Z 1 00h00m16.456248s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.831599Z 1 00h00m16.456248s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.831627Z 1 00h00m16.456248s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.831643Z 1 00h00m16.456248s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.831666Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.831703Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.831729Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.831748Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.831765Z 1 00h00m16.456248s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.833969Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 2026-01-10T11:55:48.839091Z 1 00h00m30.172812s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 
2026-01-10T11:55:48.841525Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.841585Z 1 00h00m35.743961s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.841614Z 1 00h00m35.743961s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.841631Z 1 00h00m35.743961s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.841662Z 1 00h00m35.743961s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.841678Z 1 00h00m35.743961s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.841703Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.841737Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.841763Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.841786Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.841804Z 1 00h00m35.743961s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] Test command err: RandomSeed# 11267643195283232621 2026-01-10T11:55:48.675890Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:151:8] SessionId# [0:0:0] Cookie# 6 2026-01-10T11:55:48.675956Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 9 SessionId# [1:151:8] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:48.676015Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {9.0/655845848057871034@[0:0:0]} 2026-01-10T11:55:48.676144Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [8:269:20] SessionId# [2:123:7] Cookie# 18229893176818476915 2026-01-10T11:55:48.676195Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [2:123:7] Inserted# false Subscription# {SessionId# [2:123:7] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.683526Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 8 Cookie# 18229893176818476915 SessionId# [2:123:7] Binding# {8.0/18229893176818476915@[2:123:7]} Record# {Rejected: true } 2026-01-10T11:55:48.683607Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {8.0/18229893176818476915@[2:123:7]} Reason# binding 
rejected by peer 2026-01-10T11:55:48.683682Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {8.0/18229893176818476915@[2:123:7]} 2026-01-10T11:55:48.683760Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 8 Subscription# {SessionId# [2:123:7] SubscriptionCookie# 0} 2026-01-10T11:55:48.683818Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007686s 2026-01-10T11:55:48.683946Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 655845848057871033 2026-01-10T11:55:48.683991Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.684032Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 655845848057871033 SessionId# [7:146:1] Binding# {3.2/770016851481478897@[7:98:3]} 2026-01-10T11:55:48.684070Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-01-10T11:55:48.684126Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-01-10T11:55:48.684197Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-01-10T11:55:48.684256Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} 2026-01-10T11:55:48.684336Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 4084657777769612450 2026-01-10T11:55:48.684368Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.684431Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4084657777769612450 SessionId# [5:140:1] Binding# {1.2/4084657777769612450@[5:140:1]} Record# {RootNodeId: 1 } 2026-01-10T11:55:48.684531Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [7:262:20] SessionId# [3:97:6] Cookie# 770016851481478897 2026-01-10T11:55:48.684569Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [3:97:6] Inserted# false Subscription# {SessionId# [3:97:6] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.684715Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 7 Cookie# 770016851481478897 SessionId# [3:97:6] Binding# {2.2/17422327313737512608@[3:109:2]} Record# {DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.684770Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [3:97:6] Inserted# 
false Subscription# {SessionId# [3:97:6] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-01-10T11:55:48.684813Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.9:19001/9 2026-01-10T11:55:48.684878Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.3:19001/3 2026-01-10T11:55:48.684908Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.2:19001/2 2026-01-10T11:55:48.684991Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:262:20] SessionId# [1:145:6] Cookie# 655845848057871033 2026-01-10T11:55:48.685046Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 655845848057871033 SessionId# [1:145:6] Binding# {9.0/655845848057871034@[1:151:8]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.685081Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [8:80:4] Cookie# 7861817532536855030 2026-01-10T11:55:48.685118Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [8:80:4] Inserted# false Subscription# {SessionId# [8:80:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.685159Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 7861817532536855030 SessionId# [8:80:4] Binding# {4.1/7861817532536855030@[8:80:4]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.685211Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [6:74:4] Cookie# 5982209125084973544 2026-01-10T11:55:48.685251Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [6:74:4] Inserted# false Subscription# {SessionId# [6:74:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:48.685307Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 5982209125084973544 SessionId# [6:74:4] Binding# {4.1/5982209125084973544@[6:74:4]} Record# {RootNodeId: 2 } 2026-01-10T11:55:48.685603Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 655845848057871034 2026-01-10T11:55:48.685759Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 655845848057871034 SessionId# [9:152:1] Binding# {3.2/14010268298730287916@[9:104:3]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.1" Port: 19001 NodeId: 1 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:48.687964Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-01-10T11:55:48.687998Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.2:19001/2 2026-01-10T11:55:48.688341Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [9:152:1] SessionId# [0:0:0] Cookie# 2 
2026-01-10T11:55:48.688379Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 1 SessionId# [9:152:1] Cookie# 2 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:48.689773Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [9:276:20] SessionId# [1:151:8] Cookie# 655845848057871034 2026-01-10T11:55:48.689813Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 9 SessionId# [1: ... leLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.746846Z 1 00h00m01.893419s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.746877Z 1 00h00m01.893419s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.746910Z 1 00h00m01.893419s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.746942Z 1 00h00m01.893419s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.749172Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.749240Z 1 00h00m04.173808s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.749270Z 1 00h00m04.173808s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.749295Z 1 00h00m04.173808s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.749319Z 1 00h00m04.173808s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.749345Z 1 00h00m04.173808s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.749398Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.749438Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.749464Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.749506Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.749555Z 1 00h00m04.173808s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.753587Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleInit 
ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.753653Z 1 00h00m09.327487s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.753685Z 1 00h00m09.327487s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.753748Z 1 00h00m09.327487s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.753775Z 1 00h00m09.327487s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.753800Z 1 00h00m09.327487s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.753830Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.753861Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.753891Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.753918Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.753950Z 1 00h00m09.327487s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.755406Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.755517Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:48.755561Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:48.755587Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:48.755639Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:48.755676Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-10T11:55:48.755725Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.755801Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.755841Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 
ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.763260Z 1 00h00m19.944065s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.763351Z 1 00h00m19.944065s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:48.763401Z 1 00h00m19.944065s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:48.763457Z 1 00h00m19.944065s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:48.763488Z 1 00h00m19.944065s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:48.763515Z 1 00h00m19.944065s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:48.763562Z 1 00h00m19.944065s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:48.763604Z 1 00h00m19.944065s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 2} 2026-01-10T11:55:48.763626Z 1 00h00m19.944065s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-10T11:55:48.763726Z 1 00h00m19.944065s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-01-10T11:55:48.764997Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:48.765091Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:48.765136Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:48.765172Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:48.765203Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:48.765229Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-10T11:55:48.765313Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.765370Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:48.765396Z 1 00h00m20.100000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 
clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-10T11:55:48.765498Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-01-10T11:55:48.765613Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:48.765648Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-10T11:55:48.765699Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-10T11:55:48.765780Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72057594037932033} 2026-01-10T11:55:48.765809Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-10T11:55:48.765855Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-10T11:55:48.768562Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: NKikimrStateStorage.TEvCleanup TabletID: 72057594037932033 ProposedLeader { RawX1: 0 RawX2: 0 } ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-01-10T11:55:48.768613Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-01-10T11:55:48.768670Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 1537616380811327807 2026-01-10T11:55:49.159912Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 7187385904245045753 2026-01-10T11:55:49.159981Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 9 2026-01-10T11:55:49.168594Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 7187385904245045753 SessionId# [1:133:2] Binding# {3.0/7187385904245045753@[1:133:2]} Record# {RootNodeId: 5 } 2026-01-10T11:55:49.168738Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008859s 2026-01-10T11:55:49.168805Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [2:108:2] Cookie# 2744689298707582317 2026-01-10T11:55:49.168854Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-01-10T11:55:49.169166Z 2 00h00m00.000000s :BS_NODE 
DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 2744689298707582317 SessionId# [2:108:2] Binding# {1.1/9103467594267655102@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-01-10T11:55:49.171281Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [1:136:3] Inserted# false Subscription# {SessionId# [1:136:3] SubscriptionCookie# 0} NextSubscribeCookie# 9 2026-01-10T11:55:49.171318Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 4 NodeId# 127.0.0.5:19001/5 2026-01-10T11:55:49.171404Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 9 2026-01-10T11:55:49.171460Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 5 Binding# {5.0/7187385904245045754@[0:0:0]} SessionId# [0:0:0] 2026-01-10T11:55:49.171777Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [3:91:4] SessionId# [0:0:0] Cookie# 6 2026-01-10T11:55:49.171815Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 5 SessionId# [3:91:4] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:49.172005Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 2144808136711849403 2026-01-10T11:55:49.172047Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-01-10T11:55:49.172125Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 2144808136711849403 SessionId# [9:152:1] Binding# {1.1/2144808136711849403@[9:152:1]} Record# {RootNodeId: 5 } 2026-01-10T11:55:49.172182Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 26863923 ... 
1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.236618Z 1 00h00m00.419874s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.236652Z 1 00h00m00.419874s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.236680Z 1 00h00m00.419874s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.236709Z 1 00h00m00.419874s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.236745Z 1 00h00m00.419874s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.236785Z 1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.236818Z 1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.236855Z 1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.236893Z 1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.236926Z 1 00h00m00.419874s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.237241Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.237323Z 1 00h00m00.891590s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.237357Z 1 00h00m00.891590s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.237389Z 1 00h00m00.891590s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.237417Z 1 00h00m00.891590s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.237450Z 1 00h00m00.891590s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.237499Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.237532Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.237566Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.237604Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup 
ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.237639Z 1 00h00m00.891590s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.238747Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.238817Z 1 00h00m01.929365s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.238866Z 1 00h00m01.929365s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.238900Z 1 00h00m01.929365s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.238928Z 1 00h00m01.929365s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.238957Z 1 00h00m01.929365s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.239004Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.239053Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.239084Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.239116Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.239149Z 1 00h00m01.929365s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.241449Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.241533Z 1 00h00m04.295492s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.241579Z 1 00h00m04.295492s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.241617Z 1 00h00m04.295492s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.241651Z 1 00h00m04.295492s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.241677Z 1 00h00m04.295492s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.241715Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.241747Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.241776Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.241806Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.241850Z 1 00h00m04.295492s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.246348Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.246438Z 1 00h00m09.642939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.246488Z 1 00h00m09.642939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.246536Z 1 00h00m09.642939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.246564Z 1 00h00m09.642939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.246599Z 1 00h00m09.642939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.246642Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.246682Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.246711Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.246747Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.246779Z 1 00h00m09.642939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.248980Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-01-10T11:55:49.249081Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:49.249130Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:49.249160Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:49.249202Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:49.249247Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 
2026-01-10T11:55:49.249307Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.249366Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.249432Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.249475Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig undelivered ringGroup:0 for: 3 2026-01-10T11:55:49.249537Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] Test command err: RandomSeed# 17094144845206372583 2026-01-10T11:55:48.956054Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 10482238218017558251 2026-01-10T11:55:48.956234Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-01-10T11:55:48.980030Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 10482238218017558251 SessionId# [8:149:1] Binding# {3.3/17264256044056339453@[8:101:3]} Record# {BoundNodes { NodeId { Host: "127.0.0.5" Port: 19001 NodeId: 5 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2026-01-10T11:55:49.084981Z 1 00h00m20.177898s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.085096Z 1 00h00m20.177898s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.085113Z 1 00h00m20.177898s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.085219Z 1 00h00m20.177898s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.085435Z 1 00h00m20.177898s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 0 2026-01-10T11:55:49.085459Z 1 00h00m20.177898s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 2} 2026-01-10T11:55:49.085574Z 1 00h00m20.177898s :STATESTORAGE DEBUG: 
StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-10T11:55:49.085824Z 1 00h00m20.177898s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2026-01-10T11:55:49.104669Z 1 00h00m30.187520s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.104894Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:49.104985Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:49.105008Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2026-01-10T11:55:49.105133Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:49.105217Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:49.105242Z 1 00h00m30.187520s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-01-10T11:55:49.105282Z 1 00h00m30.187520s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.105471Z 1 00h00m30.187520s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 0 2026-01-10T11:55:49.105588Z 1 00h00m30.187520s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.105756Z 1 00h00m30.187520s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-01-10T11:55:49.105923Z 1 00h00m30.187520s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2026-01-10T11:55:49.124297Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.124387Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:49.124667Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:49.124695Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:49.124818Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:49.124843Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 
2026-01-10T11:55:49.125081Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.125313Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.125347Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.130753Z 1 00h00m42.763784s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.130937Z 1 00h00m42.763784s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T11:55:49.130975Z 1 00h00m42.763784s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T11:55:49.131132Z 1 00h00m42.763784s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T11:55:49.131261Z 1 00h00m42.763784s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-01-10T11:55:49.131289Z 1 00h00m42.763784s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-01-10T11:55:49.131322Z 1 00h00m42.763784s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T11:55:49.131527Z 1 00h00m42.763784s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 0} 2026-01-10T11:55:49.131552Z 1 00h00m42.763784s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-01-10T11:55:49.131707Z 1 00h00m42.763784s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 2026-01-10T11:55:49.145662Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T11:55:49.145749Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T11:55:49.145786Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T11:55:49.145812Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T11:55:49.145856Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-01-10T11:55:49.145933Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 
2026-01-10T11:55:49.145980Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.146023Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T11:55:49.146044Z 1 00h00m50.300000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-01-10T11:55:49.146213Z 1 00h00m50.300000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 9679203712010331065 2026-01-10T11:55:49.323266Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [4:137:1] SessionId# [0:0:0] Cookie# 3 2026-01-10T11:55:49.323347Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 1 SessionId# [4:137:1] Cookie# 3 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:49.323411Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [2:131:1] SessionId# [0:0:0] Cookie# 6 2026-01-10T11:55:49.323442Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 1 SessionId# [2:131:1] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-01-10T11:55:49.323507Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {1.0/12229938686803642201@[0:0:0]} 2026-01-10T11:55:49.323640Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:256:20] SessionId# [1:136:3] Cookie# 14430289392913396325 2026-01-10T11:55:49.323685Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [1:136:3] Inserted# false Subscription# {SessionId# [1:136:3] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-01-10T11:55:49.332126Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 14430289392913396325 SessionId# [1:136:3] Binding# {4.0/14430289392913396325@[1:136:3]} Record# {RootNodeId: 2 } 2026-01-10T11:55:49.332244Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008602s 2026-01-10T11:55:49.332312Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [4:256:20] SessionId# [2:111:3] 
Cookie# 15458983065015215709 2026-01-10T11:55:49.332374Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [2:111:3] Inserted# false Subscription# {SessionId# [2:111:3] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-01-10T11:55:49.332724Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 4 Cookie# 15458983065015215709 SessionId# [2:111:3] Binding# {1.0/12229938686803642201@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |81.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |81.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk >> BridgeGet::PartRestorationAcrossBridge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] Test command err: RandomSeed# 10073913113428216571 originalGroupIndex# 0 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 0 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 >> BSCMovePDisk::PDiskMove_ErasureNone >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |81.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 18338078746282217284 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 5646036390723285189 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] Test command err: RandomSeed# 14049962857868197681 2026-01-10T11:55:51.415262Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:51.416373Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9785963613652258084] 2026-01-10T11:55:51.427154Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:51.489963Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:51.491031Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1410395701868572440] 2026-01-10T11:55:51.502234Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:51.548724Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:51.549845Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11342357876478539976] 2026-01-10T11:55:51.557239Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:51.848648Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: 
[Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:51.849782Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9991518079920944473] 2026-01-10T11:55:51.856316Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:51.963793Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:51.964901Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11185927812368603114] 2026-01-10T11:55:52.152408Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:52.267313Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:52.269057Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5366551138225281108] 2026-01-10T11:55:52.279849Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:52.700267Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:52.701952Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 710056373210503594] 2026-01-10T11:55:52.710431Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:52.873617Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:52.875347Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17781319185327084096] 2026-01-10T11:55:52.938609Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 6850386313166304227 2026-01-10T11:55:49.493474Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:49.495656Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# 
LostData Guid# 8884909975988652501] 2026-01-10T11:55:49.549756Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 17787309395533937710 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] Test command err: RandomSeed# 1481580880931821918 2026-01-10T11:55:54.766967Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:54.768258Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6140346156209910267] 2026-01-10T11:55:54.792240Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:54.875954Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:54.877325Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12876950477514126793] 2026-01-10T11:55:54.889454Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:54.949214Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:54.950518Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3061961745902387225] 2026-01-10T11:55:54.959348Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:55.052735Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:55.054016Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8946764148424193811] 2026-01-10T11:55:55.062604Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:55.406216Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData 
SubsequentFailure# 0] 2026-01-10T11:55:55.407557Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3708553030935627649] 2026-01-10T11:55:55.415831Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:55.525134Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:55.526619Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11135439405379261609] 2026-01-10T11:55:55.540542Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:55.604574Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:55.605936Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13442192813061519764] 2026-01-10T11:55:55.614875Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:55.709910Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:55.711197Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17125917857982518051] 2026-01-10T11:55:55.720673Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.129424Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.130825Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18419329817568181660] 2026-01-10T11:55:56.139298Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.217944Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.219261Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 318368007962911439] 2026-01-10T11:55:56.235835Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.303728Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.305145Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 
VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6536884598301519532] 2026-01-10T11:55:56.314496Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.412421Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.413842Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14751362301093166959] 2026-01-10T11:55:56.423424Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.852632Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.854042Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3031094295564903173] 2026-01-10T11:55:56.861575Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:56.962862Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:56.964261Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2401837384904758145] 2026-01-10T11:55:57.034812Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:57.112159Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:57.113515Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 975917540846261925] 2026-01-10T11:55:57.123138Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-01-10T11:55:57.234630Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:57.236153Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16543164290000933975] 2026-01-10T11:55:57.245587Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] |81.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 7071061729246900320 |81.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 7150359172935679213 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 13916346338274861146 |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |81.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> BSCRestartPDisk::RestartOneByOne [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 8087644100469300247 2026-01-10T11:55:29.161473Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.223507s 2026-01-10T11:55:29.162264Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.224635s |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/gateway/ut/gtest |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 8681975244716177919 2026-01-10T11:55:29.893004Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.120200s 2026-01-10T11:55:29.893115Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.120350s |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |81.3%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |81.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |81.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |81.3%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |81.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 3604540500277650642 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 
0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** 
reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge discover maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from 
i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 ... 
iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge discover maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 
mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 
2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| 
[TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.4%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |81.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> SequenceShardTests::Basics |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |81.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |81.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |81.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> KqpExecuter::TestSuddenAbortAfterReady >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> SequenceShardTests::NegativeIncrement [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2026-01-10T11:56:46.659174Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-10T11:56:46.659306Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-10T11:56:46.705906Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-10T11:56:46.775032Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-10T11:56:46.775370Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-10T11:56:46.820961Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-10T11:56:46.821103Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2026-01-10T11:56:46.868014Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-10T11:56:46.868366Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-10T11:56:46.868405Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:33: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:46.868452Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-10T11:56:46.868658Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2026-01-10T11:56:46.868766Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2026-01-10T11:56:46.917050Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-10T11:56:46.919388Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:46.920499Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS 
PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2026-01-10T11:56:46.954879Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:46.957446Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2026-01-10T11:56:46.957937Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2026-01-10T11:56:46.991409Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:46.993587Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-10T11:56:46.994311Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2026-01-10T11:56:47.028910Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.032383Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2026-01-10T11:56:47.033527Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2026-01-10T11:56:47.061854Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.063756Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2026-01-10T11:56:47.064578Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2026-01-10T11:56:47.064981Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.067718Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-10T11:56:47.070014Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2026-01-10T11:56:47.105016Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.106666Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-10T11:56:47.107003Z node 1 :SEQUENCESHARD TRACE: 
tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.107337Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.112016Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.112679Z node 1 :SEQUENCESHARD NOTICE: tx_drop_sequence.cpp:43: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.149326Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-10T11:56:47.152287Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.152865Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:33: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.153159Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-10T11:56:47.265962Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-10T11:56:47.266871Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-10T11:56:47.273056Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-10T11:56:47.277977Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-10T11:56:47.279844Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-10T11:56:47.321305Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:47.321608Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:47.321894Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.324123Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-10T11:56:47.324624Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2026-01-10T11:56:47.347962Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.350807Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2026-01-10T11:56:47.350888Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] 
TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-10T11:56:47.388602Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-10T11:56:47.391907Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-10T11:56:47.394488Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2026-01-10T11:56:47.438947Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:47.451466Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2026-01-10T11:56:47.452411Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-10T11:56:47.490418Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-10T11:56:47.490686Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-10T11:56:47.490745Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2026-01-10T11:56:47.522779Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard ... 
Id: 43] Cache# 0 2026-01-10T11:56:50.019452Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2026-01-10T11:56:50.030769Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.031086Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2026-01-10T11:56:50.031116Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:66: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-10T11:56:50.031171Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-10T11:56:50.031357Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.031436Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.042807Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-10T11:56:50.043004Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.043051Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.054237Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-10T11:56:50.054411Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.054454Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-10T11:56:50.065661Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-10T11:56:50.065861Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.065894Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:54: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2026-01-10T11:56:50.065931Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.066098Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, 
LocalPathId: 43] 2026-01-10T11:56:50.066153Z node 3 :SEQUENCESHARD NOTICE: tx_freeze_sequence.cpp:68: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-10T11:56:50.077493Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-10T11:56:50.077996Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-10T11:56:50.078103Z node 3 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-10T11:56:50.089428Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-10T11:56:50.089651Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-10T11:56:50.089710Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-10T11:56:50.101021Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-10T11:56:50.101253Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-10T11:56:50.101285Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:48: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2026-01-10T11:56:50.101333Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-10T11:56:50.101506Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.101576Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2026-01-10T11:56:50.112918Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.437856Z node 4 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-10T11:56:50.437932Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-10T11:56:50.445066Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-10T11:56:50.447651Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-10T11:56:50.447701Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 
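
The SequenceShardTests::FreezeRestoreRedirect trace above exercises the sequence lifecycle statuses: Restore on an active sequence reports SEQUENCE_ALREADY_ACTIVE, Redirect marks a sequence as moved to another tablet (so a later Allocate or Freeze reports SEQUENCE_MOVED with the MovedTo tablet id), and Restore brings a moved or frozen sequence back to Active. The sketch below is a toy state machine distilled only from these log lines; it is not the real ydb/core/tx/sequenceshard transaction code, and the names TSequenceEntry/EState, the SEQUENCE_FROZEN branch, and allowing Redirect from any state are assumptions the trace does not confirm.

#include <cstdint>
#include <iostream>

// Statuses as they appear in the trace; SEQUENCE_FROZEN is an assumption,
// since the trace never allocates from a frozen sequence.
enum class EStatus { SUCCESS, SEQUENCE_MOVED, SEQUENCE_ALREADY_ACTIVE, SEQUENCE_FROZEN };
enum class EState { Active, Frozen, Moved };

struct TSequenceEntry {
    EState State = EState::Active;
    uint64_t MovedTo = 0;  // target tablet once redirected

    EStatus Allocate() {
        if (State == EState::Moved)  return EStatus::SEQUENCE_MOVED;
        if (State == EState::Frozen) return EStatus::SEQUENCE_FROZEN;  // assumed
        return EStatus::SUCCESS;
    }
    EStatus Freeze() {
        if (State == EState::Moved) return EStatus::SEQUENCE_MOVED;
        State = EState::Frozen;
        return EStatus::SUCCESS;
    }
    EStatus Redirect(uint64_t tablet) {
        // The repeated RedirectTo# 12345 entries suggest redirect is idempotent;
        // the trace only shows it applied to frozen or already-moved sequences.
        State = EState::Moved;
        MovedTo = tablet;
        return EStatus::SUCCESS;
    }
    EStatus Restore() {
        if (State == EState::Active) return EStatus::SEQUENCE_ALREADY_ACTIVE;
        State = EState::Active;  // a frozen or moved sequence becomes active again
        return EStatus::SUCCESS;
    }
};

static const char* Name(EStatus s) {
    switch (s) {
        case EStatus::SUCCESS:                 return "SUCCESS";
        case EStatus::SEQUENCE_MOVED:          return "SEQUENCE_MOVED";
        case EStatus::SEQUENCE_ALREADY_ACTIVE: return "SEQUENCE_ALREADY_ACTIVE";
        case EStatus::SEQUENCE_FROZEN:         return "SEQUENCE_FROZEN";
    }
    return "?";
}

int main() {
    TSequenceEntry path42, path43;                       // LocalPathId 42 and 43
    std::cout << Name(path43.Restore()) << "\n";         // SEQUENCE_ALREADY_ACTIVE
    std::cout << Name(path42.Redirect(12345)) << "\n";   // SUCCESS
    std::cout << Name(path42.Allocate()) << "\n";        // SEQUENCE_MOVED, MovedTo# 12345
    std::cout << Name(path43.Freeze()) << "\n";          // SUCCESS
    std::cout << Name(path42.Restore()) << "\n";         // SUCCESS: active again
    std::cout << Name(path43.Redirect(54321)) << "\n";   // SUCCESS
    std::cout << Name(path43.Freeze()) << "\n";          // SEQUENCE_MOVED, MovedTo# 54321
    std::cout << Name(path42.Allocate()) << "\n";        // SUCCESS, as in the trace
}

Run in the order of the trace, this model reproduces the same status sequence for paths 42 and 43 as the log entries above.
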
2026-01-10T11:56:50.449162Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2026-01-10T11:56:50.449256Z node 4 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2026-01-10T11:56:50.471230Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-10T11:56:50.471517Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.471614Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-10T11:56:50.483429Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.483756Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.483841Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-10T11:56:50.495659Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.495962Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-10T11:56:50.496045Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2026-01-10T11:56:50.507795Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.508163Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-10T11:56:50.508212Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-10T11:56:50.508274Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.508542Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2026-01-10T11:56:50.508625Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 
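
The negative-increment trace above (node 4) shows the allocation arithmetic: Increment: -1 turns the bounds into MinValue -9223372036854775808 / MaxValue -1 with StartValue -1, a Cache# 0 request falls back to the sequence's own cache (10), an oversized request is clamped to whatever room remains before MinValue, the next request reports SEQUENCE_OVERFLOW, and after the Cycle: true update allocation starts again at -1. Below is a minimal stand-alone model of that arithmetic inferred only from these log lines, not the real sequenceshard implementation; the TSequence name, the wrap-to-MaxValue rule on cycle (indistinguishable here from wrapping to StartValue, since both are -1), the Cache-of-0-means-1 fallback, and the use of the GCC/Clang unsigned __int128 extension are all assumptions.

#include <cstdint>
#include <iostream>
#include <optional>

// Toy model of the allocation arithmetic visible above; field names follow the
// log (MinValue/MaxValue/Cache/Increment/Cycle), but this is not the real code.
struct TSequence {
    int64_t Min, Max, Increment;
    uint64_t Cache;   // default chunk size, used when a request passes Cache# 0
    bool Cycle;
    int64_t Next;     // next unallocated value
    bool Exhausted = false;

    struct TAllocation { int64_t Start; uint64_t Count; };

    // std::nullopt stands in for the SEQUENCE_OVERFLOW status from the log.
    std::optional<TAllocation> Allocate(uint64_t requested) {
        if (requested == 0) requested = Cache ? Cache : 1;   // Cache# 0 -> sequence cache
        if (Exhausted) {
            if (!Cycle) return std::nullopt;                 // SEQUENCE_OVERFLOW
            Next = Increment > 0 ? Min : Max;                // assumed wrap rule on cycle
            Exhausted = false;
        }
        // Values left before the boundary; 128-bit math (GCC/Clang extension) avoids overflow.
        unsigned __int128 remaining = Increment > 0
            ? (unsigned __int128)((uint64_t)Max - (uint64_t)Next) + 1
            : (unsigned __int128)((uint64_t)Next - (uint64_t)Min) + 1;
        uint64_t count = remaining < requested ? (uint64_t)remaining : requested;
        TAllocation out{Next, count};
        if (count == remaining) Exhausted = true;            // handed out the boundary value
        else Next += Increment * (int64_t)count;
        return out;
    }
};

int main() {
    // The Increment: -1 sequence from the trace: bounds [-2^63, -1], cache 10, next -1.
    TSequence s{INT64_MIN, -1, -1, 10, false, -1};
    const uint64_t requests[] = {0, 0, UINT64_MAX, 1};       // Cache# values from the log
    for (uint64_t req : requests) {
        if (auto a = s.Allocate(req))
            std::cout << "AllocationStart# " << a->Start << " AllocationCount# " << a->Count << "\n";
        else
            std::cout << "SEQUENCE_OVERFLOW\n";
    }
    s.Cycle = true;                                          // mirrors the Cycle: true update
    auto a = s.Allocate(0);
    std::cout << "after cycle: AllocationStart# " << a->Start << " AllocationCount# " << a->Count << "\n";
}

Built with a C++17 toolchain of the same default-linux-x86_64 family as this run, it should print the start/count pairs seen in the trace: (-1, 10), (-11, 10), (-21, 9223372036854775788), SEQUENCE_OVERFLOW, and (-1, 10) again after cycling.
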
2026-01-10T11:56:50.520585Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-10T11:56:50.520919Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.521015Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-10T11:56:50.533003Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-10T11:56:50.533361Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-10T11:56:50.533452Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-10T11:56:50.545395Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |81.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |81.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceshard/ut/unittest |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> TTxDataShardTestInit::TestGetShardStateAfterInitialization >> Vacuum::Vacuum >> TFetchRequestTests::HappyWay >> DataShardStats::OneChannelStatsCorrect >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> 
TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TIndexProcesorTests::TestCreateIndexProcessor >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] >> Vacuum::Vacuum [GOOD] >> Vacuum::VacuumWithoutCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] Test command err: Trying to start YDB, gRPC: 14632, MsgBus: 24348 ... waiting for SysViewsRoster update finished 2026-01-10T11:56:52.697320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:56:52.773286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:56:52.779904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:56:52.780252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:56:52.780328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:56:53.098261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:56:53.098405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:56:53.102936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046210357602 != 1768046210357605 2026-01-10T11:56:53.120764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:56:53.168222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:56:53.298936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:56:53.684831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:56:53.684910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:56:53.684964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:56:53.685568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:56:53.697555Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:56:54.099762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:56:54.224857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:54.531658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:54.882999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:55.123492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:56.358502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:56:56.358747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:56:56.359438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:56:56.359561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:56:56.382785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:56.611274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:57.070110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:57.612930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:58.044576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:58.575335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:59.149775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:59.797131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:00.320138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:00.320332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:00.320747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:00.320850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:00.320924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:00.325539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:00.494498Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2800:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T11:57:00.564967Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2860:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Got NActors::IEventHandle [1:3147:4460] [1:360:2377] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:0] [1:3147:4460] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3147:4460] [1:306:2349] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:0] [1:3147:4460] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3147:4460] [1:306:2349] Got NKikimr::TEvTabletBase ... 48544295275:29292] [1:3162:4459] 2026-01-10T11:57:06.213250Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3162:4459] TxId: 281474976715673. Ctx: { TraceId: 01kekw8s6b01pe35ysbr4130fr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjM2OTY2YmUtYzY1YWFmNDgtNjIyMDk1NGItYTRlOTgzYg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3162:4459] Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3162:4459] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3146:4459] [1:3162:4459] 2026-01-10T11:57:06.214820Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NjM2OTY2YmUtYzY1YWFmNDgtNjIyMDk1NGItYTRlOTgzYg==, ActorId: [1:3146:4459], ActorState: ExecuteState, LegacyTraceId: 01kekw8s6b01pe35ysbr4130fr, Create QueryResponse for error on request, msg: status# STATUS_CODE_UNSPECIFIED issues# trace_id# Got NKikimr::NSysView::TEvSysView::TEvCollectQueryStats [1:6014387330472966483:2188150] [1:3146:4459] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3146:4459] Got NActors::TEvents::TEvPoison [1:3162:4459] [1:3162:4459] Got NKikimr::NKqp::NWorkload::TEvCleanupRequest [1:2868:4231] [1:3146:4459] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFinishRequestInPool [1:7742373267896299883:25708] [1:2868:4231] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3162:4459] [1:285:2329] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3163:4459] [1:68:2115] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3163:4459] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3163:4459] Got NKikimr::NKqp::NWorkload::TEvCleanupResponse [1:3146:4459] [1:2868:4231] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3163:4459] 2026-01-10T11:57:06.216948Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3163:4459] TxId: 281474976715674. Ctx: { TraceId: 01kekw8s6b01pe35ysbr4130fr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjM2OTY2YmUtYzY1YWFmNDgtNjIyMDk1NGItYTRlOTgzYg==, PoolId: default, IsStreamingQuery: 0}. 
Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3163:4459] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3146:4459] [1:3163:4459] 2026-01-10T11:57:06.217094Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3081} SessionId: ydb://session/3?node_id=1&id=NjM2OTY2YmUtYzY1YWFmNDgtNjIyMDk1NGItYTRlOTgzYg==, ActorId: [1:3146:4459], ActorState: CleanupState, LegacyTraceId: 01kekw8s6b01pe35ysbr4130fr, Failed to cleanup issues# trace_id# Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3163:4459] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:66:2113] [1:3146:4459] Got NActors::TEvents::TEvPoison [1:3163:4459] [1:3163:4459] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:3148:4461] [1:66:2113] Got NActors::TEvents::TEvPoison [1:3149:4462] [1:66:2113] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3163:4459] [1:285:2329] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1187:2913] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1190:2915] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1191:2918] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1193:2922] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2209:3717] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2286:3774] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2296:3776] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2385:3851] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2394:3853] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1420:3127] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1424:3131] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2496:3935] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2510:3937] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2514:3941] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2020:3560] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:960:2803] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:970:2805] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2081:3611] [0:0:0] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1053:2883] [1:1187:2913] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1055:2885] [1:1190:2915] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1056:2886] [1:1191:2918] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1058:2888] [1:1193:2922] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:2148:3667] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1138:2904] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1144:2906] [0:0:0] Got NKikimr::NTabletFlatExecutor::TExecutor::TEvPrivate::TEvLeaseExtend [1:1163:2911] [0:0:0] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2193:3712] [1:2209:3717] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2258:3766] [1:2286:3774] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2259:3767] [1:2296:3776] Got 
NKikimr::TEvTablet::TEvConfirmLeader [1:2356:3843] [1:2385:3851] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2357:3844] [1:2394:3853] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1369:3115] [1:1420:3127] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1372:3117] [1:1424:3131] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2458:3924] [1:2496:3935] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2459:3925] [1:2510:3937] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2462:3927] [1:2514:3941] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2004:3556] [1:2020:3560] Got NKikimr::TEvTablet::TEvConfirmLeader [1:931:2794] [1:960:2803] Got NKikimr::TEvTablet::TEvConfirmLeader [1:932:2795] [1:970:2805] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1053:2883] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2065:3606] [1:2081:3611] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1045:2878] [1:1138:2904] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1047:2879] [1:1144:2906] Got NKikimr::TEvTablet::TEvConfirmLeader [1:1049:2881] [1:1163:2911] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1053:2883] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeader [1:2132:3662] [1:2148:3667] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1187:2913] [1:1053:2883] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1055:2885] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1056:2886] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1055:2885] [1:945:2802] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1056:2886] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1190:2915] [1:1055:2885] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1191:2918] [1:1056:2886] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1058:2888] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1058:2888] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1193:2922] [1:1058:2888] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2193:3712] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2193:3712] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2209:3717] [1:2193:3712] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2258:3766] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2259:3767] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2258:3766] [1:945:2802] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2259:3767] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2286:3774] [1:2258:3766] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2296:3776] [1:2259:3767] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2356:3843] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2356:3843] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2385:3851] [1:2356:3843] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2357:3844] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1369:3115] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2357:3844] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2394:3853] [1:2357:3844] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1369:3115] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1420:3127] [1:1369:3115] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1372:3117] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1372:3117] [1:945:2802] Got 
NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1424:3131] [1:1372:3117] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2458:3924] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2458:3924] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2496:3935] [1:2458:3924] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2459:3925] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2459:3925] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2510:3937] [1:2459:3925] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2462:3927] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2462:3927] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2514:3941] [1:2462:3927] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2004:3556] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2004:3556] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2020:3560] [1:2004:3556] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:931:2794] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:932:2795] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:931:2794] [1:945:2802] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:932:2795] [1:945:2802] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2065:3606] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:960:2803] [1:931:2794] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:970:2805] [1:932:2795] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2065:3606] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2081:3611] [1:2065:3606] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1045:2878] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1045:2878] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1138:2904] [1:1045:2878] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1047:2879] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1047:2879] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1144:2906] [1:1047:2879] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:1049:2881] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:1049:2881] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:1163:2911] [1:1049:2881] Got NKikimr::TEvBlobStorage::TEvGetBlock [0:34190892238664546:8519680] [1:2132:3662] Got NKikimr::TEvBlobStorage::TEvGetBlockResult [1:2132:3662] [1:945:2802] Got NKikimr::TEvTablet::TEvConfirmLeaderResult [1:2148:3667] [1:2132:3662] |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/executer_actor/ut/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.7%| [TM] {RESULT} ydb/core/kqp/executer_actor/ut/unittest |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [FAIL] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> TTxDataShardTestInit::TestResolvePathAfterRestart 
[GOOD] >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::CDC >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2026-01-10T11:56:54.169186Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvBoot 2026-01-10T11:56:54.173054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvRestored 2026-01-10T11:56:54.173348Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:112:2143] 2026-01-10T11:56:54.173547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:56:54.183858Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:56:54.229518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:56:54.229970Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:56:54.231895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T11:56:54.231997Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T11:56:54.232096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T11:56:54.232500Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:56:54.232751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:56:54.232816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:136:2143] in generation 2 2026-01-10T11:56:54.268735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:56:54.306897Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T11:56:54.307113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:56:54.307262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:141:2163] 2026-01-10T11:56:54.307309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T11:56:54.307362Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T11:56:54.307409Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:56:54.307581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:112:2143], Recipient [1:112:2143]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:56:54.307629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:56:54.307887Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: 
TTxCheckInReadSets::Execute at 9437184 2026-01-10T11:56:54.307997Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T11:56:54.308055Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T11:56:54.308111Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:56:54.308152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T11:56:54.308187Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T11:56:54.308232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T11:56:54.308281Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T11:56:54.308325Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T11:56:54.309582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [1:103:2137], Recipient [1:112:2143]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 4294969433 } 2026-01-10T11:56:54.309650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState ... waiting for SysViewsRoster update finished 2026-01-10T11:56:59.548443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:00.083484Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:00.083817Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:00.083893Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:00.333672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:00.333808Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:00.406611Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046214675726 != 1768046214675730 2026-01-10T11:57:00.420877Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:00.464021Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:00.639701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:01.174124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:01.198112Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:01.379991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:01.526069Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-10T11:57:01.529395Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:57:01.830202Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:57:01.831397Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:57:01.854578Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T11:57:01.854685Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T11:57:01.854743Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T11:57:01.855067Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:57:01.855194Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:57:01.855273Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-10T11:57:01.866842Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:57:01.867145Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T11:57:01.867631Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:57:01.868180Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-10T11:57:01.868424Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:57:01.868650Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T11:57:01.869285Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:01.870667Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T11:57:01.871320Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T11:57:01.871807Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:57:01.872232Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:57:01.872434Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T11:57:01.872812Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:57:01.873087Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:884:2764], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-10T11:57:01.874486Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T11:57:01.874710Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T11:57:01.874807Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T11:57:01.876843Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:57:01.887578Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T11:57:01.8 ... 
planned 0 immediate 0 planned 1 2026-01-10T11:57:17.673633Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T11:57:17.674109Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T11:57:17.674703Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T11:57:17.677021Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:57:17.677074Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T11:57:17.678780Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T11:57:17.680853Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:57:17.684132Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T11:57:17.684171Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:17.686939Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T11:57:17.687252Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:57:17.692301Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:57:17.692533Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:57:17.692564Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:57:17.692877Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T11:57:17.692925Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T11:57:17.692964Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T11:57:17.693242Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:17.700838Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T11:57:17.701318Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T11:57:17.701598Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T11:57:17.724311Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T11:57:17.725664Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T11:57:17.798030Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:956:2812] 2026-01-10T11:57:17.798250Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:57:17.801654Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:57:17.802491Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:57:17.812710Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T11:57:17.813129Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T11:57:17.813514Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T11:57:17.816331Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:57:17.816589Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:57:17.816642Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:971:2812] in generation 2 2026-01-10T11:57:17.839878Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:57:17.839974Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-01-10T11:57:17.840051Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T11:57:17.840153Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T11:57:17.840206Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4210: Resolve path at 72075186224037888: reason# empty path 2026-01-10T11:57:17.840299Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:975:2822] 2026-01-10T11:57:17.840335Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:57:17.840370Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T11:57:17.840394Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:17.840597Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-10T11:57:17.840824Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-10T11:57:17.841684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976710657 message# Source { RawX1: 956 RawX2: 12884904700 } Origin: 72075186224037888 State: 2 TxId: 281474976710657 Step: 0 Generation: 2 2026-01-10T11:57:17.841740Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 
72075186224037888 2026-01-10T11:57:17.841810Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T11:57:17.841958Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2000 2026-01-10T11:57:17.841982Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:17.842211Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:57:17.842263Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:57:17.842290Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:57:17.842330Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T11:57:17.842358Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:57:17.842515Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T11:57:17.870076Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4271: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 
1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-10T11:57:17.870327Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T11:57:17.870405Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T11:57:17.870571Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:20: TTxStoreTablePath::Execute at 72075186224037888 2026-01-10T11:57:17.871345Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:979:2826], serverId# [3:981:2827], sessionId# [0:0:0] 2026-01-10T11:57:17.886195Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:39: TTxStoreTablePath::Complete at 72075186224037888 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_init/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] Test command err: RandomSeed# 5658362248865796794 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 
mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 
mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 2 *** performing bridge range ... 
sk2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 
mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> StatisticsScan::RunScanOnShard >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] |81.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue |81.8%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |81.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Vacuum::VacuumWithoutCompaction [GOOD] >> Vacuum::MultipleVacuums >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |81.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> TMemoryController::Counters >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> DataShardReplication::SimpleApplyChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 12680337378233267252 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> Vacuum::MultipleVacuums [GOOD] >> Vacuum::MultipleVacuumsWithOldGenerations |81.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [FAIL] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TFetchRequestTests::CDC [GOOD] >> TFetchRequestTests::SmallBytesRead >> StatisticsScan::RunScanOnShard [GOOD] >> TCreateAndDropViewTest::CheckCreatedView >> test_cte.py::TestCte::test_toplevel >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:57:25.507984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:26.393971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:57:26.415966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:26.417835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:26.418516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:28.149134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:28.149274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:28.154674Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046240329214 != 1768046240329217 2026-01-10T11:57:28.172014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:28.218135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:28.315036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:28.601973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:28.643454Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:28.808917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:29.920742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:29.921706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:29.922662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:29.932372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:29.933472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:29.971582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:30.284372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:57:30.579885Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_column_stats/unittest >> Vacuum::MultipleVacuumsWithOldGenerations [GOOD] >> Vacuum::VacuumWithRestart >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> test.py::test[solomon-BadDownsamplingAggregation-] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest |81.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut >> TMLPChangerTests::TopicNotExists >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> Vacuum::VacuumWithRestart [GOOD] >> Vacuum::OutReadSetsCleanedAfterCopyTable >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.9%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> TFetchRequestTests::SmallBytesRead [GOOD] >> TFetchRequestTests::EmptyTopic >> TMLPConsumerTests::ReloadPQTablet >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> test_cte.py::TestCte::test_toplevel [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> TGRpcRateLimiterTest::CreateResource >> Vacuum::OutReadSetsCleanedAfterCopyTable [GOOD] >> Vacuum::BorrowerDataCleanedAfterCopyTable |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/plan2svg/py3test >> test_cte.py::TestCte::test_toplevel [GOOD] |81.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/plan2svg/py3test >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.9%| [TM] {RESULT} ydb/tests/functional/kqp/plan2svg/py3test |81.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2026-01-10T11:56:50.807533Z node 1 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-10T11:56:50.808037Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:56:50.808185Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-10T11:56:50.808293Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-10T11:56:50.808307Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:56:50.811953Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:56:50 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-10T11:56:50.812073Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:01.964204Z node 2 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-10T11:57:01.970817Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:02.109564Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-10T11:57:02.169651Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:02.245774Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:02.331017Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:02.424718Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:02.512052Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 109013 bytes of data to buffer 2026-01-10T11:57:02.609504Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:02.650236Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2026-01-10T11:57:02.652498Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-10T11:57:02.652719Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:02.652993Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:02.653011Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:03.120733Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:03 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:03.126932Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:03.128054Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:03.392668Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:03 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:03.407523Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:03.407561Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:03.801841Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:03 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:03.818397Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:03.818999Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:04.364097Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:04.374257Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:04.374299Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:04.778637Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:04 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:04.784708Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2026-01-10T11:57:04.784727Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-10T11:57:05.316303Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:05 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:05.317498Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:05.854579Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:05 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:05.855535Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:06.122129Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Sat, 10 Jan 2026 11:57:06 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 500} 2026-01-10T11:57:06.122769Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2026-01-10T11:57:18.437614Z node 3 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-10T11:57:18.535018Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:18.645967Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 79903 bytes of data to buffer 2026-01-10T11:57:18.701583Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-10T11:57:18.763253Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-10T11:57:18.818969Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-10T11:57:18.856384Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 81013 bytes of data to buffer 2026-01-10T11:57:18.899797Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-10T11:57:18.968686Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-10T11:57:19.004133Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 40513 bytes of data to buffer 2026-01-10T11:57:19.005413Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 79903 bytes to solomon 2026-01-10T11:57:19.005631Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.005796Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.005810Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:19.217132Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.217545Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.217568Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:19.292554Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.292921Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.292936Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:19.384560Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.387004Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.387044Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:19.455793Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.456168Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-10T11:57:19.456182Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:19.508639Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.509019Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 40513 bytes to solomon 2026-01-10T11:57:19.509046Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-10T11:57:19.562362Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.562492Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:19.612124Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-10T11:57:19.612257Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:19.638782Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 25 Date: Sat, 10 Jan 2026 11:57:19 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 500} 2026-01-10T11:57:19.638896Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream 2026-01-10T11:57:39.736410Z node 4 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-10T11:57:39.761597Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:39.801653Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2026-01-10T11:57:39.802741Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2026-01-10T11:57:39.802756Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer 2026-01-10T11:57:39.854809Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Sat, 10 Jan 2026 11:57:39 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 10} 2026-01-10T11:57:39.867039Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:51.199044Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-10T11:57:51.201299Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:51.218560Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-10T11:57:51.232762Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-10T11:57:51.239017Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2026-01-10T11:57:51.239405Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-10T11:57:51.239713Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-10T11:57:51.239842Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2026-01-10T11:57:51.239862Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-10T11:57:51.264628Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Sat, 10 Jan 2026 11:57:51 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 400} 2026-01-10T11:57:51.264743Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2026-01-10T11:57:51.427141Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:51 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:51.427287Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2026-01-10T11:57:51.480835Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sat, 10 Jan 2026 11:57:51 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-10T11:57:51.480970Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:57.089261Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. 
Init 2026-01-10T11:57:57.089879Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:57.089991Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-10T11:57:57.090096Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-10T11:57:57.090107Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2026-01-10T11:57:57.127790Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:57 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-10T11:57:57.129946Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:57.133673Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-10T11:57:57.135649Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-10T11:57:57.136657Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-10T11:57:57.136669Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-10T11:57:57.305061Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sat, 10 Jan 2026 11:57:57 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-10T11:57:57.306840Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer |81.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/providers/solomon/actors/ut/unittest >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.9%| [TM] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest |81.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas >> TDescriberTests::TopicExists >> TListAllTopicsTests::PlainList >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> TMLPChangerTests::TopicNotExists [GOOD] >> TMLPChangerTests::ConsumerNotExists >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] >> Discovery::DelayedNameserviceResponse >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_vacuum/unittest >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:56:59.098908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:56:59.872202Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-10T11:56:59.948872Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:56:59.951503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:56:59.952445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:00.312643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:00.312757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:00.377290Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:36:2083] 1768046214343976 != 1768046214343979 2026-01-10T11:57:00.388696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:00.433869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:00.536007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:01.228577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:01.264951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:01.422319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:02.750965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:986:2843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:02.751056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:02.752599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:02.764482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1000:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:02.765197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:02.798672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:03.086374Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1001:2852], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:57:03.298931Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1058:2890] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-10T11:57:18.657024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:18.678836Z node 2 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-10T11:57:18.701765Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:18.702420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:18.703351Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:19.322688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:19.322820Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:19.347890Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:36:2083] 1768046231888690 != 1768046231888694 2026-01-10T11:57:19.355818Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:19.405298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:19.487175Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:19.774329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:19.788169Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:19.892414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:20.174857Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:979:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:20.174946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:990:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:20.175001Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:20.175745Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:20.175876Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:20.179201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:20.282305Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:993:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:57:20.317069Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1050:2883] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-10T11:57:24.307563Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:24.325856Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-10T11:57:24.328497Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Faile ... message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-10T11:57:59.647072Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:59.651826Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-10T11:57:59.652829Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:57:59.659080Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:316:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:59.659312Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:59.659500Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:58:01.429756Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:01.429891Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:01.454250Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:36:2083] 1768046273391710 != 1768046273391714 2026-01-10T11:58:01.460973Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:01.520023Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:01.668869Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:58:02.147038Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:02.186108Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:02.473721Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:04.346042Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:04.346865Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1086:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:04.347263Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:04.353089Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:04.353234Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:04.372219Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:04.664151Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T11:58:04.719994Z node 6 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [6:1149:2963] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-10T11:58:12.922534Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:12.927815Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-10T11:58:12.933306Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:295:2340], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:58:12.933802Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:12.933872Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:58:13.248528Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:13.248690Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:13.250342Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:36:2083] 1768046289510888 != 1768046289510892 2026-01-10T11:58:13.263147Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:13.317361Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:13.394935Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:58:13.736713Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:13.758044Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:13.891874Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:15.106152Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:15.106561Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1086:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:15.106871Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:15.110708Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:15.110837Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:15.126753Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:15.345183Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T11:58:15.400459Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:1149:2963] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:58:17.417057Z node 7 :TX_DATASHARD WARN: datashard__vacuum.cpp:37: Vacuum of tablet# 72075186224037888: has borrowed parts, requested from [7:832:2727] |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_vacuum/unittest >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> test.py::test[solomon-UnknownSetting-] [GOOD] >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> TestFilterSet::FilterGroup >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast >> DataShardDiskQuotas::DiskQuotaExceeded >> TFetchRequestTests::EmptyTopic [GOOD] >> TFetchRequestTests::BadTopicName |81.9%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |81.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/tests/sql/solomon/pytest |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |82.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_vacuum/unittest |82.0%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> TestFilterSet::FilterGroup [GOOD] >> TestFilterSet::DuplicationValidation >> TDescriberTests::TopicExists [GOOD] >> TDescriberTests::TopicNotExists |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/kqprun/tests/py3test >> TMLPChangerTests::ConsumerNotExists [GOOD] >> TMLPChangerTests::PartitionNotExists >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2026-01-10T11:57:00.506756Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700694223760016:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:00.506825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:57:01.852150Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:01.905377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:57:01.908959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:01.938150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:01.938300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:02.181946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:02.921671Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:02.940729Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593700694223759987:2081] 1768046220505603 != 1768046220505606 2026-01-10T11:57:02.953079Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.100443s 2026-01-10T11:57:02.953388Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.100763s 2026-01-10T11:57:03.001109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:57:03.478814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:57:03.478835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:57:03.478843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:57:03.479223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:57:05.512649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593700694223760016:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:05.512718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:57:07.753372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:07.782893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T11:57:16.892638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:57:16.892660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:17.967827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:17.977932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:19.148273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:19.153188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-10T11:57:19.203278Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700775828139834:2721] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T11:57:19.392473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2026-01-10T11:57:19.418582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T11:57:19.420026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1768046239238, "myFolder", "{\"k1\": \"v1\"}"); 2026-01-10T11:57:19.513475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700775828140011:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:19.513475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700775828140003:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:19.513598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:19.513892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700775828140018:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:19.513937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:19.516593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:19.524997Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593700775828140017:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2026-01-10T11:57:19.595216Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700775828140072:2874] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 52], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:19.238000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:19.238000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:20.281117Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-10T11:57:20.796380Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700780123107575:2953] txid# 281474976710680, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/SingleCreateQueueEvent\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/SingleCreateQueueEvent', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T11:57:20.816772Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700780123107584:2958] txid# 281474976710681, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:57:20.826810Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700780123107591:2963] txid# 281474976710682, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 48], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute q ... 
tributes":{"labels":{"k1": "v1"}}} Error 1: Check failed: path: '/Root/SQS/Root/SQS/DeletedQueueNotReindexed', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T11:57:27.319356Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700810187879515:3428] txid# 281474976710721, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/DeletedQueueNotReindexed\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:57:27.460198Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700810187879525:3433] txid# 281474976710722, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 60], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:57:27.460377Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700810187879531:3437] txid# 281474976710723, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 59], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "queue2", "myQueueCustomName", 1768046245000, "myFolder", NULL); ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue2", 0, "myQueueCustomName", 1768046246078, "myFolder", NULL); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:26.078000Z","resource_id":"queue2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-10T11:57:26.078000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2026-01-10T11:57:30.430130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710730:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:30.467805Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700823072781564:3508] txid# 281474976710731, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T11:57:31.838760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710732, at schemeshard: 72057594046644480 2026-01-10T11:57:32.140550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710733:1, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:32.199174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710734:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing1", "myQueueCustomName", 1768046249000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing2", "myQueueCustomName", 1768046249000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing3", "myQueueCustomName", 1768046249000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting1", "myQueueCustomName", 1768046249000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting2", "myQueueCustomName", 1768046249000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting1", 0, "myQueueCustomName", 1768046250514, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting2", 0, "myQueueCustomName", 1768046250514, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating1", 1, "myQueueCustomName", 1768046250514, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating2", 1, "myQueueCustomName", 1768046250514, "myFolder", "{\"k1\": \"v1\"}"); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-10T11:57:30.514000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-10T11:57:30.514000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 
===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:29.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:38.190516Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:29.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:38.190593Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:38.190634Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:29.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:38.190659Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-10T11:57:30.514000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-10T11:57:38.190684Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-10T11:57:40.051260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710762:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:40.060772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710762, at schemeshard: 72057594046644480 Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, 
LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T11:57:40.082762Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700866022455198:3843] txid# 281474976710763, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:57:41.724802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710764, at schemeshard: 72057594046644480 2026-01-10T11:57:41.765453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710765:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:41.767876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710766:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Started add queue batch |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/ymq/actor/yc_search_ut/unittest >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TTxDataShardBuildFulltextDictScan::BadRequest >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 6700075604924353865 Reassign# 1 -- VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 1 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:38:0] Put# [1:1:2:0:0:42:0] Put# [1:1:3:0:0:9:0] 2026-01-10T11:55:39.253150Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:39.255810Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5928929512383896768] 2026-01-10T11:55:39.267606Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:1:0:0:38:5] 2026-01-10T11:55:39.267701Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:2:0:0:42:6] 2026-01-10T11:55:39.267985Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 2 PartsResurrected# 2 Put# [1:1:4:0:0:63:0] Put# [1:1:5:0:0:23:0] Put# [1:1:6:0:0:1:0] Put# [1:1:7:0:0:93:0] Put# [1:1:8:0:0:14:0] Put# [1:1:9:0:0:12:0] Put# [1:1:10:0:0:63:0] Put# [1:1:11:0:0:88:0] Put# [1:1:12:0:0:41:0] Put# [1:1:13:0:0:74:0] Put# [1:1:14:0:0:37:0] Put# [1:1:15:0:0:83:0] Put# 
[1:1:16:0:0:59:0] Put# [1:1:17:0:0:42:0] Put# [1:1:18:0:0:83:0] Put# [1:1:19:0:0:85:0] Put# [1:1:20:0:0:5:0] Put# [1:1:21:0:0:26:0] Put# [1:1:22:0:0:63:0] Put# [1:1:23:0:0:74:0] Put# [1:1:24:0:0:54:0] Put# [1:1:25:0:0:49:0] Put# [1:1:26:0:0:100:0] Put# [1:1:27:0:0:71:0] Put# [1:1:28:0:0:43:0] Put# [1:1:29:0:0:49:0] Put# [1:1:30:0:0:12:0] Put# [1:1:31:0:0:12:0] Put# [1:1:32:0:0:19:0] Put# [1:1:33:0:0:21:0] Put# [1:1:34:0:0:13:0] Put# [1:1:35:0:0:59:0] Put# [1:1:36:0:0:65:0] Put# [1:1:37:0:0:40:0] Put# [1:1:38:0:0:86:0] Put# [1:1:39:0:0:48:0] Put# [1:1:40:0:0:57:0] Put# [1:1:41:0:0:10:0] Put# [1:1:42:0:0:71:0] Put# [1:1:43:0:0:93:0] Put# [1:1:44:0:0:29:0] Put# [1:1:45:0:0:67:0] Put# [1:1:46:0:0:76:0] Put# [1:1:47:0:0:92:0] Put# [1:1:48:0:0:28:0] Put# [1:1:49:0:0:32:0] Put# [1:1:50:0:0:4:0] Put# [1:1:51:0:0:4:0] Put# [1:1:52:0:0:46:0] Put# [1:1:53:0:0:75:0] Put# [1:1:54:0:0:99:0] Put# [1:1:55:0:0:80:0] Put# [1:1:56:0:0:7:0] Put# [1:1:57:0:0:6:0] Put# [1:1:58:0:0:7:0] Put# [1:1:59:0:0:64:0] Put# [1:1:60:0:0:33:0] Put# [1:1:61:0:0:97:0] Put# [1:1:62:0:0:14:0] Put# [1:1:63:0:0:94:0] Put# [1:1:64:0:0:14:0] Put# [1:1:65:0:0:97:0] Put# [1:1:66:0:0:35:0] Put# [1:1:67:0:0:91:0] Put# [1:1:68:0:0:48:0] Put# [1:1:69:0:0:8:0] Put# [1:1:70:0:0:15:0] Put# [1:1:71:0:0:88:0] Put# [1:1:72:0:0:64:0] Put# [1:1:73:0:0:37:0] Put# [1:1:74:0:0:37:0] Put# [1:1:75:0:0:9:0] Put# [1:1:76:0:0:29:0] Put# [1:1:77:0:0:10:0] Put# [1:1:78:0:0:83:0] Put# [1:1:79:0:0:9:0] Put# [1:1:80:0:0:34:0] Put# [1:1:81:0:0:64:0] Put# [1:1:82:0:0:23:0] Put# [1:1:83:0:0:34:0] Put# [1:1:84:0:0:21:0] Put# [1:1:85:0:0:2:0] Put# [1:1:86:0:0:61:0] Put# [1:1:87:0:0:92:0] Put# [1:1:88:0:0:43:0] Put# [1:1:89:0:0:98:0] Put# [1:1:90:0:0:49:0] Put# [1:1:91:0:0:59:0] Put# [1:1:92:0:0:46:0] Put# [1:1:93:0:0:71:0] Put# [1:1:94:0:0:82:0] Put# [1:1:95:0:0:72:0] Put# [1:1:96:0:0:86:0] Put# [1:1:97:0:0:65:0] Put# [1:1:98:0:0:23:0] Put# [1:1:99:0:0:74:0] Put# [1:1:100:0:0:19:0] Put# [1:1:101:0:0:83:0] Put# [1:1:102:0:0:75:0] Put# [1:1:103:0:0:74:0] Put# [1:1:104:0:0:68:0] Put# [1:1:105:0:0:66:0] Put# [1:1:106:0:0:50:0] Put# [1:1:107:0:0:75:0] Put# [1:1:108:0:0:27:0] Put# [1:1:109:0:0:13:0] Put# [1:1:110:0:0:93:0] Put# [1:1:111:0:0:62:0] Put# [1:1:112:0:0:21:0] Put# [1:1:113:0:0:83:0] Put# [1:1:114:0:0:46:0] Put# [1:1:115:0:0:66:0] Put# [1:1:116:0:0:75:0] Put# [1:1:117:0:0:44:0] Put# [1:1:118:0:0:57:0] Put# [1:1:119:0:0:99:0] Put# [1:1:120:0:0:93:0] Put# [1:1:121:0:0:96:0] Put# [1:1:122:0:0:36:0] Put# [1:1:123:0:0:84:0] Put# [1:1:124:0:0:15:0] Put# [1:1:125:0:0:2:0] Put# [1:1:126:0:0:33:0] Put# [1:1:127:0:0:34:0] Put# [1:1:128:0:0:12:0] Put# [1:1:129:0:0:17:0] Put# [1:1:130:0:0:25:0] Put# [1:1:131:0:0:30:0] Put# [1:1:132:0:0:71:0] Put# [1:1:133:0:0:67:0] Put# [1:1:134:0:0:8:0] Put# [1:1:135:0:0:9:0] Put# [1:1:136:0:0:90:0] Put# [1:1:137:0:0:44:0] Put# [1:1:138:0:0:11:0] Put# [1:1:139:0:0:96:0] Put# [1:1:140:0:0:47:0] Put# [1:1:141:0:0:92:0] Put# [1:1:142:0:0:1:0] Put# [1:1:143:0:0:98:0] Put# [1:1:144:0:0:19:0] Put# [1:1:145:0:0:26:0] Put# [1:1:146:0:0:65:0] Put# [1:1:147:0:0:61:0] Put# [1:1:148:0:0:19:0] Put# [1:1:149:0:0:17:0] Put# [1:1:150:0:0:41:0] Put# [1:1:151:0:0:9:0] Put# [1:1:152:0:0:52:0] Put# [1:1:153:0:0:83:0] Put# [1:1:154:0:0:53:0] Put# [1:1:155:0:0:27:0] Put# [1:1:156:0:0:83:0] Put# [1:1:157:0:0:80:0] Put# [1:1:158:0:0:81:0] Put# [1:1:159:0:0:6:0] Put# [1:1:160:0:0:69:0] Put# [1:1:161:0:0:31:0] Put# [1:1:162:0:0:59:0] Put# [1:1:163:0:0:62:0] Put# [1:1:164:0:0:25:0] Put# [1:1:165:0:0:17:0] Put# [1:1:166:0:0:58:0] Put# [1:1:167:0:0:58:0] Put# 
[1:1:168:0:0:74:0] Put# [1:1:169:0:0:47:0] Put# [1:1:170:0:0:44:0] Put# [1:1:171:0:0:23:0] Put# [1:1:172:0:0:49:0] Put# [1:1:173:0:0:36:0] Put# [1:1:174:0:0:54:0] Put# [1:1:175:0:0:7:0] Put# [1:1:176:0:0:36:0] Put# [1:1:177:0:0:43:0] Put# [1:1:178:0:0:50:0] Put# [1:1:179:0:0:61:0] Put# [1:1:180:0:0:1:0] Put# [1:1:181:0:0:4:0] Put# [1:1:182:0:0:82:0] Put# [1:1:183:0:0:48:0] Put# [1:1:184:0:0:9:0] Put# [1:1:185:0:0:61:0] Put# [1:1:186:0:0:93:0] Put# [1:1:187:0:0:34:0] Put# [1:1:188:0:0:9:0] Put# [1:1:189:0:0:26:0] Put# [1:1:190:0:0:76:0] Put# [1:1:191:0:0:82:0] Put# [1:1:192:0:0:76:0] Put# [1:1:193:0:0:18:0] Put# [1:1:194:0:0:36:0] Put# [1:1:195:0:0:2:0] Put# [1:1:196:0:0:78:0] Put# [1:1:197:0:0:65:0] Put# [1:1:198:0:0:32:0] Put# [1:1:199:0:0:79:0] Put# [1:1:200:0:0:18:0] Put# [1:1:201:0:0:22:0] Put# [1:1:202:0:0:44:0] Put# [1:1:203:0:0:33:0] Put# [1:1:204:0:0:55:0] Put# [1:1:205:0:0:91:0] Put# [1:1:206:0:0:27:0] Put# [1:1:207:0:0:3:0] Put# [1:1:208:0:0:25:0] Put# [1:1:209:0:0:62:0] Put# [1:1:210:0:0:97:0] Put# [1:1:211:0:0:83:0] Put# [1:1:212:0:0:45:0] Put# [1:1:213:0:0:45:0] Put# [1:1:214:0:0:91:0] Put# [1:1:215:0:0:22:0] Put# [1:1:216:0:0:17:0] Put# [1:1:217:0:0:3:0] Put# [1:1:218:0:0:16:0] Put# [1:1:219:0:0:17:0] Put# [1:1:220:0:0:67:0] Put# [1:1:221:0:0:32:0] Put# [1:1:222:0:0:48:0] Put# [1:1:223:0:0:9:0] Put# [1:1:224:0:0:23:0] Put# [1:1:225:0:0:94:0] Put# [1:1:226:0:0:34:0] Put# [1:1:227:0:0:53:0] Put# [1:1:228:0:0:52:0] Put# [1:1:229:0:0:14:0] Put# [1:1:230:0:0:2:0] Put# [1:1:231:0:0:42:0] Put# [1:1:232:0:0:9:0] Put# [1:1:233:0:0:41:0] Put# [1:1:234:0:0:5:0] Put# [1:1:235:0:0:36:0] Put# [1:1:236:0:0:40:0] Put# [1:1:237:0:0:18:0] Put# [1:1:238:0:0:1:0] Put# [1:1:239:0:0:36:0] Put# [1:1:240:0:0:35:0] Put# [1:1:241:0:0:17:0] Put# [1:1:242:0:0:33:0] Put# [1:1:243:0:0:86:0] Put# [1:1:244:0:0:27:0] Put# [1:1:245:0:0:41:0] Put# [1:1:246:0:0:66:0] Put# [1:1:247:0:0:86:0] Put# [1:1:248:0:0:93:0] Put# [1:1:249:0:0:100:0] Put# [1:1:250:0:0:94:0] Put# [1:1:251:0:0:29:0] Put# [1:1:252:0:0:53:0] Put# [1:1:253:0:0:88:0] Put# [1:1:254:0:0:42:0] Put# [1:1:255:0:0:12:0] Put# [1:1:256:0:0:11:0] Put# [1:1:257:0:0:96:0] Put# [1:1:258:0:0:84:0] Put# [1:1:259:0:0:7:0] Put# [1:1:260:0:0:69:0] Put# [1:1:261:0:0:1:0] Put# [1:1:262:0:0:27:0] Put# [1:1:263:0:0:46:0] Put# [1:1:264:0:0:89:0] Put# [1:1:265:0:0:30:0] Put# [1:1:266:0:0:52:0] Put# [1:1:267:0:0:69:0] Put# [1:1:268:0:0:18:0] Put# [1:1:269:0:0:51:0] Put# [1:1:270:0:0:98:0] Put# [1:1:271:0:0:81:0] Put# [1:1:272:0:0:62:0] Put# [1:1:273:0:0:94:0] Put# [1:1:274:0:0:37:0] Put# [1:1:275:0:0:75:0] Put# [1:1:276:0:0:89:0] Put# [1:1:277:0:0:67:0] Put# [1:1:278:0:0:42:0] Put# [1:1:279:0:0:13:0] Put# [1:1:280:0:0:54:0] Put# [1:1:281:0:0:82:0] Put# [1:1:282:0:0:28:0] Put# [1:1:283:0:0:16:0] Put# [1:1:284:0:0:27:0] Put# [1:1:285:0:0:23:0] Put# [1:1:286:0:0:27:0] Put# [1:1:287:0:0:2:0] Put# [1:1:288:0:0:91:0] Put# [1:1:289:0:0:87:0] Put# [1:1:290:0:0:37:0] Put# [1:1:291:0:0:28:0] Put# [1:1:292:0:0:52:0] Put# [1:1:293:0:0:44:0] Put# [1:1:294:0:0:21:0] Put# [1:1:295:0:0:27:0] Put# [1:1:296:0:0:42:0] Put# [1:1:297:0:0:82:0] Put# [1:1:298:0:0:38:0] Put# [1:1:299:0:0:79:0] Put# [1:1:300:0:0:75:0] Put# [1:1:301:0:0:12:0] Put# [1:1:302:0:0:6:0] Put# [1:1:303:0:0:18:0] Put# [1:1:304:0:0:29:0] Put# [1:1:305:0:0:53:0] Put# [1:1:306:0:0:55:0] Put# [1:1:307:0:0:99:0] Put# [1:1:308:0:0:89:0] Put# [1:1:309:0:0:43:0] Put# [1:1:310:0:0:90:0] Put# [1:1:311:0:0:88:0] Put# [1:1:312:0:0:33:0] Put# [1:1:313:0:0:78:0] Put# [1:1:314:0:0:87:0] Put# [1:1:315:0:0:62:0] Put# [1:1:316:0:0:49:0] 
Put# [1:1:317:0:0:33:0] Put# [1:1:318:0:0:56:0] Put# [1:1:319:0:0:48:0] Put# [1:1:320:0:0:39:0] Put# [1:1:321:0:0:81:0] Put# [1:1:322:0:0:19:0] Put# [1:1:323:0:0:76:0] Put# [1:1:324:0:0:40:0] Put# [1:1:325:0:0:97:0] Put# [1:1:326:0:0:45:0] Put# [1:1:327:0:0:99:0] Put# [1:1:328:0:0:4:0] Put# [1:1:329:0:0:83:0] Put# [1:1:330:0:0:73:0] Put# [1:1:331:0:0:20:0] Put# [1:1:332:0:0:14:0] Put# [1:1:333:0:0:40:0] Put# [1:1:334:0:0:49:0] Put# [1:1:335:0:0:59:0] Put# [1:1:336:0:0:45:0] Put# [1:1:337:0:0:70:0] Put# [1:1:338:0:0:60:0] Put# [1:1:339:0:0:70:0] Put# [1:1:340:0:0:39:0] Put# [1:1:341:0:0:31:0] Put# [1:1:342:0:0:89:0] Put# [1:1:343:0:0:84:0] Put# [1:1:344:0:0:42:0] Put# [1:1:345:0:0:78:0] Put# [1:1:346:0:0:77:0] Put# [1:1:347:0:0:10:0] Put# [1:1:348:0:0:24:0] Put# [1:1:349:0:0:75:0] Put# [1:1:350:0:0:58:0] Put# [1:1:351:0:0:13:0] Put# [1:1:352:0:0:24:0] Put# [1:1:353:0:0:6:0] Put# [1:1:354:0:0:85:0] Put# [1:1:355:0:0:60:0] Put# [1:1:356:0:0:32:0] Put# [1:1:357:0:0:16:0] Put# [1:1:358:0:0:53:0] Put# [1:1:359:0:0:95:0] Put# [1:1:360:0:0:11:0] Put# [1:1:361:0:0:60:0] Put# [1:1:362:0:0:33:0] Put# [1:1:363:0:0:32:0] Put# [1:1:364:0:0:16:0] Put# [1:1:365:0:0:38:0] Put# [1:1:366:0:0:21:0] Put# [1:1:367:0:0:7:0] Put# [1:1:368:0:0:53:0] Put# [1:1:369:0:0:20:0] Put# [1:1:370:0:0:50:0] Put# [1:1:371:0:0:86:0] Put# [1:1:372:0:0:62:0] Put# [1:1:373:0:0:50:0] Put# [1:1:374:0:0:94:0] Put# [1:1:375:0:0:44:0] Put# [1:1:376:0:0:4:0] Put# [1:1:377:0:0:21:0] Put# [1:1:378:0:0:17:0] Put# [1:1:379:0:0:26:0] Put# [1:1:380:0:0:96:0] Put# [1:1:381:0:0:7:0] Put# [1:1:382:0:0:48:0] Put# [1:1:383:0:0:91:0] Put# [1:1:384:0:0:27:0] Put# [1:1:385:0:0:49:0] Put# [1:1:386:0:0:73:0] Put# [1:1:387:0:0:47:0] Put# [1:1:388:0:0:12:0] Put# [1:1:389:0:0:26:0] Put# [1:1:390:0:0:62:0] Put# [1:1:391:0:0:92:0] Put# [1:1:392:0:0:26:0] Put# [1:1:393:0:0:15:0] Put# [1:1:394:0:0:3:0] Put# [1:1:395:0:0:33:0] Put# [1:1:396:0:0:69:0] Put# [1:1:397:0:0:3:0] Put# [1:1:398:0:0:39:0] Put# [1:1:399:0:0:77:0] Put# [1:1:400:0:0:10:0] Put# [1:1:401:0:0:86:0] Put# [1:1:402:0:0:4:0] Put# [1:1:403:0:0:12:0] Put# [1:1:404:0:0:19:0] Put# [1:1:405:0:0:90:0] Put# [1:1:406:0:0:97:0] Put# [1:1:407:0:0:97:0] Put# [1:1:408:0:0:37:0] Put# [1:1:409:0:0:65:0] Put# [1:1:410:0:0:59:0] Put# [1:1:411:0:0:12:0] Put# [1:1:412:0:0:43:0] Put# [1:1:413:0:0:84:0] Put# [1:1:414:0:0:86:0] Put# [1:1:415:0:0:92:0] Put# [1:1:416:0:0:56:0] Put# [1:1:417:0:0:27:0] Put# [1:1:418:0:0:61:0] Put# [1:1:419:0:0:96:0] Put# [1:1:420:0:0:100:0] Put# [1:1:421:0:0:74:0] Put# [1:1:422:0:0:19:0] Put# [1:1:423:0:0:48:0] Put# [1:1:424:0:0:86:0] Put# [1:1:425:0:0:75:0] Put# [1:1:426:0:0:42:0] Put# [1:1:427:0:0:37:0] Put# [1:1:428:0:0:37:0] Put# [1:1:429:0:0:3:0] Put# [1:1:430:0:0:93:0] Put# [1:1:431:0:0:27:0] Put# [1:1:432:0:0:59:0] Put# [1:1:433:0:0:28:0] Put# [1:1:434:0:0:7:0] Put# [1:1:435:0:0:24:0] Put# [1:1:436:0:0:37:0] Put# [1:1:437:0:0:30:0] Put# [1:1:438:0:0:83:0] Put# [1:1:439:0:0:73:0] Put# [1:1:440:0:0:8:0] Put# [1:1:441:0:0:53:0] Put# [1:1:442:0:0:6:0] Put# [1:1:443:0:0:49:0] Put# [1:1:444:0:0:83:0] Put# [1:1:445:0:0:18:0] Put# [1:1:446:0:0:42:0] Put# [1:1:447:0:0:81:0] Put# [1:1:448:0:0:17:0] Put# [1:1:449:0:0:31:0] Put# [1:1:450:0:0:19:0] Put# [1:1:451:0:0:51:0] Put# [1:1:452:0:0:95:0] Put# [1:1:453:0:0:81: ... 
1:3:7941:0:0:81:0] Put# [1:3:7942:0:0:88:0] Put# [1:3:7943:0:0:41:0] Put# [1:3:7944:0:0:44:0] Put# [1:3:7945:0:0:78:0] Put# [1:3:7946:0:0:26:0] Put# [1:3:7947:0:0:65:0] Put# [1:3:7948:0:0:40:0] Put# [1:3:7949:0:0:52:0] Put# [1:3:7950:0:0:3:0] Put# [1:3:7951:0:0:89:0] Put# [1:3:7952:0:0:91:0] Put# [1:3:7953:0:0:76:0] Put# [1:3:7954:0:0:56:0] Put# [1:3:7955:0:0:93:0] Put# [1:3:7956:0:0:91:0] Put# [1:3:7957:0:0:8:0] Put# [1:3:7958:0:0:34:0] Put# [1:3:7959:0:0:90:0] Put# [1:3:7960:0:0:76:0] Put# [1:3:7961:0:0:78:0] Put# [1:3:7962:0:0:99:0] Put# [1:3:7963:0:0:68:0] Put# [1:3:7964:0:0:91:0] Put# [1:3:7965:0:0:78:0] Put# [1:3:7966:0:0:34:0] Put# [1:3:7967:0:0:58:0] Put# [1:3:7968:0:0:75:0] Put# [1:3:7969:0:0:45:0] Put# [1:3:7970:0:0:43:0] Put# [1:3:7971:0:0:39:0] Put# [1:3:7972:0:0:64:0] Put# [1:3:7973:0:0:1:0] Put# [1:3:7974:0:0:84:0] Put# [1:3:7975:0:0:18:0] Put# [1:3:7976:0:0:39:0] Put# [1:3:7977:0:0:69:0] Put# [1:3:7978:0:0:37:0] Put# [1:3:7979:0:0:2:0] Put# [1:3:7980:0:0:14:0] Put# [1:3:7981:0:0:40:0] Put# [1:3:7982:0:0:53:0] Put# [1:3:7983:0:0:59:0] Put# [1:3:7984:0:0:80:0] Put# [1:3:7985:0:0:26:0] Put# [1:3:7986:0:0:31:0] Put# [1:3:7987:0:0:67:0] Put# [1:3:7988:0:0:76:0] Put# [1:3:7989:0:0:72:0] Put# [1:3:7990:0:0:43:0] Put# [1:3:7991:0:0:48:0] Put# [1:3:7992:0:0:21:0] Put# [1:3:7993:0:0:70:0] Put# [1:3:7994:0:0:59:0] Put# [1:3:7995:0:0:24:0] Put# [1:3:7996:0:0:3:0] Put# [1:3:7997:0:0:99:0] Put# [1:3:7998:0:0:61:0] Put# [1:3:7999:0:0:97:0] Put# [1:3:8000:0:0:67:0] Put# [1:3:8001:0:0:5:0] Put# [1:3:8002:0:0:28:0] Put# [1:3:8003:0:0:28:0] Put# [1:3:8004:0:0:67:0] Put# [1:3:8005:0:0:87:0] Put# [1:3:8006:0:0:70:0] Put# [1:3:8007:0:0:67:0] Put# [1:3:8008:0:0:14:0] Put# [1:3:8009:0:0:87:0] Put# [1:3:8010:0:0:4:0] Put# [1:3:8011:0:0:97:0] Put# [1:3:8012:0:0:4:0] Put# [1:3:8013:0:0:98:0] Put# [1:3:8014:0:0:70:0] Put# [1:3:8015:0:0:54:0] Put# [1:3:8016:0:0:42:0] Put# [1:3:8017:0:0:38:0] Put# [1:3:8018:0:0:75:0] Put# [1:3:8019:0:0:9:0] Put# [1:3:8020:0:0:48:0] Put# [1:3:8021:0:0:99:0] Put# [1:3:8022:0:0:24:0] Put# [1:3:8023:0:0:16:0] Put# [1:3:8024:0:0:6:0] Put# [1:3:8025:0:0:26:0] Put# [1:3:8026:0:0:8:0] Put# [1:3:8027:0:0:63:0] Put# [1:3:8028:0:0:54:0] Put# [1:3:8029:0:0:32:0] Put# [1:3:8030:0:0:41:0] Put# [1:3:8031:0:0:80:0] Put# [1:3:8032:0:0:83:0] Put# [1:3:8033:0:0:38:0] Put# [1:3:8034:0:0:44:0] Put# [1:3:8035:0:0:46:0] Put# [1:3:8036:0:0:94:0] Put# [1:3:8037:0:0:52:0] Put# [1:3:8038:0:0:43:0] Put# [1:3:8039:0:0:8:0] Put# [1:3:8040:0:0:20:0] Put# [1:3:8041:0:0:44:0] Put# [1:3:8042:0:0:41:0] Put# [1:3:8043:0:0:70:0] Put# [1:3:8044:0:0:82:0] Put# [1:3:8045:0:0:74:0] Put# [1:3:8046:0:0:51:0] Put# [1:3:8047:0:0:59:0] Put# [1:3:8048:0:0:76:0] Put# [1:3:8049:0:0:71:0] Put# [1:3:8050:0:0:80:0] Put# [1:3:8051:0:0:79:0] Put# [1:3:8052:0:0:2:0] Put# [1:3:8053:0:0:94:0] Put# [1:3:8054:0:0:36:0] Put# [1:3:8055:0:0:34:0] Put# [1:3:8056:0:0:54:0] Put# [1:3:8057:0:0:22:0] Put# [1:3:8058:0:0:40:0] Put# [1:3:8059:0:0:20:0] Put# [1:3:8060:0:0:88:0] Put# [1:3:8061:0:0:40:0] Put# [1:3:8062:0:0:90:0] Put# [1:3:8063:0:0:82:0] Put# [1:3:8064:0:0:76:0] Put# [1:3:8065:0:0:1:0] Put# [1:3:8066:0:0:60:0] Put# [1:3:8067:0:0:52:0] Put# [1:3:8068:0:0:82:0] Put# [1:3:8069:0:0:52:0] Put# [1:3:8070:0:0:68:0] Put# [1:3:8071:0:0:51:0] Put# [1:3:8072:0:0:24:0] Put# [1:3:8073:0:0:53:0] Put# [1:3:8074:0:0:19:0] Put# [1:3:8075:0:0:30:0] Put# [1:3:8076:0:0:78:0] Put# [1:3:8077:0:0:92:0] Put# [1:3:8078:0:0:100:0] Put# [1:3:8079:0:0:93:0] Put# [1:3:8080:0:0:64:0] Put# [1:3:8081:0:0:24:0] Put# [1:3:8082:0:0:98:0] Put# 
[1:3:8083:0:0:77:0] Put# [1:3:8084:0:0:50:0] Put# [1:3:8085:0:0:35:0] Put# [1:3:8086:0:0:99:0] Put# [1:3:8087:0:0:64:0] Put# [1:3:8088:0:0:93:0] Put# [1:3:8089:0:0:92:0] Put# [1:3:8090:0:0:86:0] Put# [1:3:8091:0:0:85:0] Put# [1:3:8092:0:0:66:0] Put# [1:3:8093:0:0:66:0] Put# [1:3:8094:0:0:40:0] Put# [1:3:8095:0:0:9:0] Put# [1:3:8096:0:0:88:0] Put# [1:3:8097:0:0:69:0] Put# [1:3:8098:0:0:93:0] Put# [1:3:8099:0:0:89:0] Put# [1:3:8100:0:0:62:0] Put# [1:3:8101:0:0:45:0] Put# [1:3:8102:0:0:18:0] Put# [1:3:8103:0:0:28:0] Put# [1:3:8104:0:0:60:0] Put# [1:3:8105:0:0:88:0] Put# [1:3:8106:0:0:24:0] Put# [1:3:8107:0:0:77:0] Put# [1:3:8108:0:0:71:0] Put# [1:3:8109:0:0:69:0] Put# [1:3:8110:0:0:65:0] Put# [1:3:8111:0:0:50:0] Put# [1:3:8112:0:0:100:0] Put# [1:3:8113:0:0:6:0] Put# [1:3:8114:0:0:32:0] Put# [1:3:8115:0:0:41:0] Put# [1:3:8116:0:0:1:0] Put# [1:3:8117:0:0:12:0] Put# [1:3:8118:0:0:84:0] Put# [1:3:8119:0:0:38:0] Put# [1:3:8120:0:0:22:0] Put# [1:3:8121:0:0:56:0] Put# [1:3:8122:0:0:37:0] Put# [1:3:8123:0:0:69:0] Put# [1:3:8124:0:0:39:0] Put# [1:3:8125:0:0:16:0] Put# [1:3:8126:0:0:40:0] Put# [1:3:8127:0:0:26:0] Put# [1:3:8128:0:0:83:0] Put# [1:3:8129:0:0:14:0] Put# [1:3:8130:0:0:66:0] Put# [1:3:8131:0:0:47:0] Put# [1:3:8132:0:0:39:0] Put# [1:3:8133:0:0:27:0] Put# [1:3:8134:0:0:99:0] Put# [1:3:8135:0:0:50:0] Put# [1:3:8136:0:0:21:0] Put# [1:3:8137:0:0:38:0] Put# [1:3:8138:0:0:98:0] Put# [1:3:8139:0:0:1:0] Put# [1:3:8140:0:0:14:0] Put# [1:3:8141:0:0:29:0] Put# [1:3:8142:0:0:78:0] Put# [1:3:8143:0:0:21:0] Put# [1:3:8144:0:0:53:0] Put# [1:3:8145:0:0:97:0] Put# [1:3:8146:0:0:58:0] Put# [1:3:8147:0:0:53:0] Put# [1:3:8148:0:0:5:0] Put# [1:3:8149:0:0:21:0] Put# [1:3:8150:0:0:19:0] Put# [1:3:8151:0:0:47:0] Put# [1:3:8152:0:0:40:0] Put# [1:3:8153:0:0:72:0] Put# [1:3:8154:0:0:19:0] Put# [1:3:8155:0:0:54:0] Put# [1:3:8156:0:0:97:0] Put# [1:3:8157:0:0:52:0] Put# [1:3:8158:0:0:61:0] Put# [1:3:8159:0:0:12:0] Put# [1:3:8160:0:0:62:0] Put# [1:3:8161:0:0:85:0] Put# [1:3:8162:0:0:52:0] Put# [1:3:8163:0:0:9:0] Put# [1:3:8164:0:0:65:0] Put# [1:3:8165:0:0:30:0] Put# [1:3:8166:0:0:35:0] Put# [1:3:8167:0:0:26:0] Put# [1:3:8168:0:0:55:0] Put# [1:3:8169:0:0:47:0] Put# [1:3:8170:0:0:63:0] Put# [1:3:8171:0:0:25:0] Put# [1:3:8172:0:0:45:0] Put# [1:3:8173:0:0:65:0] Put# [1:3:8174:0:0:79:0] Put# [1:3:8175:0:0:55:0] Put# [1:3:8176:0:0:68:0] Put# [1:3:8177:0:0:39:0] Put# [1:3:8178:0:0:31:0] Put# [1:3:8179:0:0:97:0] Put# [1:3:8180:0:0:56:0] Put# [1:3:8181:0:0:7:0] Put# [1:3:8182:0:0:85:0] Put# [1:3:8183:0:0:82:0] Put# [1:3:8184:0:0:19:0] Put# [1:3:8185:0:0:23:0] Put# [1:3:8186:0:0:21:0] Put# [1:3:8187:0:0:93:0] Put# [1:3:8188:0:0:8:0] Put# [1:3:8189:0:0:90:0] Put# [1:3:8190:0:0:59:0] Put# [1:3:8191:0:0:12:0] Put# [1:3:8192:0:0:97:0] Put# [1:3:8193:0:0:79:0] Put# [1:3:8194:0:0:84:0] Put# [1:3:8195:0:0:24:0] Put# [1:3:8196:0:0:37:0] Put# [1:3:8197:0:0:47:0] Put# [1:3:8198:0:0:87:0] Put# [1:3:8199:0:0:95:0] Put# [1:3:8200:0:0:43:0] Put# [1:3:8201:0:0:29:0] Put# [1:3:8202:0:0:100:0] Put# [1:3:8203:0:0:22:0] Put# [1:3:8204:0:0:81:0] Put# [1:3:8205:0:0:2:0] Put# [1:3:8206:0:0:74:0] Put# [1:3:8207:0:0:7:0] Put# [1:3:8208:0:0:95:0] Put# [1:3:8209:0:0:6:0] Put# [1:3:8210:0:0:78:0] Put# [1:3:8211:0:0:8:0] Put# [1:3:8212:0:0:39:0] Put# [1:3:8213:0:0:6:0] Put# [1:3:8214:0:0:18:0] Put# [1:3:8215:0:0:60:0] Put# [1:3:8216:0:0:88:0] Put# [1:3:8217:0:0:37:0] Put# [1:3:8218:0:0:57:0] Put# [1:3:8219:0:0:97:0] Put# [1:3:8220:0:0:52:0] Put# [1:3:8221:0:0:69:0] Put# [1:3:8222:0:0:1:0] Put# [1:3:8223:0:0:96:0] Put# [1:3:8224:0:0:64:0] Put# 
[1:3:8225:0:0:70:0] Put# [1:3:8226:0:0:71:0] Put# [1:3:8227:0:0:32:0] Put# [1:3:8228:0:0:21:0] Put# [1:3:8229:0:0:40:0] Put# [1:3:8230:0:0:84:0] Put# [1:3:8231:0:0:40:0] Put# [1:3:8232:0:0:35:0] Put# [1:3:8233:0:0:90:0] Put# [1:3:8234:0:0:23:0] Put# [1:3:8235:0:0:23:0] Put# [1:3:8236:0:0:54:0] Put# [1:3:8237:0:0:39:0] Put# [1:3:8238:0:0:24:0] Put# [1:3:8239:0:0:93:0] Put# [1:3:8240:0:0:38:0] Put# [1:3:8241:0:0:26:0] Put# [1:3:8242:0:0:65:0] Put# [1:3:8243:0:0:9:0] Put# [1:3:8244:0:0:86:0] Put# [1:3:8245:0:0:33:0] Put# [1:3:8246:0:0:98:0] Put# [1:3:8247:0:0:6:0] Put# [1:3:8248:0:0:37:0] Put# [1:3:8249:0:0:32:0] Put# [1:3:8250:0:0:25:0] Put# [1:3:8251:0:0:29:0] Put# [1:3:8252:0:0:99:0] Put# [1:3:8253:0:0:6:0] Put# [1:3:8254:0:0:66:0] Put# [1:3:8255:0:0:10:0] Put# [1:3:8256:0:0:11:0] Put# [1:3:8257:0:0:84:0] Put# [1:3:8258:0:0:59:0] Put# [1:3:8259:0:0:99:0] Put# [1:3:8260:0:0:72:0] Put# [1:3:8261:0:0:43:0] Put# [1:3:8262:0:0:41:0] Put# [1:3:8263:0:0:31:0] Put# [1:3:8264:0:0:50:0] Put# [1:3:8265:0:0:79:0] Put# [1:3:8266:0:0:23:0] Put# [1:3:8267:0:0:8:0] Put# [1:3:8268:0:0:3:0] Put# [1:3:8269:0:0:95:0] Put# [1:3:8270:0:0:96:0] Put# [1:3:8271:0:0:29:0] Put# [1:3:8272:0:0:71:0] Put# [1:3:8273:0:0:32:0] Put# [1:3:8274:0:0:34:0] Put# [1:3:8275:0:0:99:0] Put# [1:3:8276:0:0:67:0] Put# [1:3:8277:0:0:12:0] Put# [1:3:8278:0:0:63:0] Put# [1:3:8279:0:0:17:0] Put# [1:3:8280:0:0:95:0] Put# [1:3:8281:0:0:1:0] Put# [1:3:8282:0:0:62:0] Put# [1:3:8283:0:0:12:0] Put# [1:3:8284:0:0:71:0] Put# [1:3:8285:0:0:45:0] Put# [1:3:8286:0:0:9:0] Put# [1:3:8287:0:0:78:0] Put# [1:3:8288:0:0:83:0] Put# [1:3:8289:0:0:56:0] Put# [1:3:8290:0:0:7:0] Put# [1:3:8291:0:0:12:0] Put# [1:3:8292:0:0:32:0] Put# [1:3:8293:0:0:92:0] Put# [1:3:8294:0:0:11:0] Put# [1:3:8295:0:0:84:0] Put# [1:3:8296:0:0:31:0] Put# [1:3:8297:0:0:74:0] Put# [1:3:8298:0:0:9:0] Put# [1:3:8299:0:0:96:0] Put# [1:3:8300:0:0:52:0] Put# [1:3:8301:0:0:11:0] Put# [1:3:8302:0:0:48:0] Put# [1:3:8303:0:0:6:0] Put# [1:3:8304:0:0:48:0] Put# [1:3:8305:0:0:9:0] Put# [1:3:8306:0:0:11:0] Put# [1:3:8307:0:0:87:0] Put# [1:3:8308:0:0:98:0] Put# [1:3:8309:0:0:10:0] Put# [1:3:8310:0:0:35:0] Put# [1:3:8311:0:0:42:0] Put# [1:3:8312:0:0:18:0] Put# [1:3:8313:0:0:71:0] Put# [1:3:8314:0:0:36:0] Put# [1:3:8315:0:0:23:0] Put# [1:3:8316:0:0:35:0] Put# [1:3:8317:0:0:16:0] Put# [1:3:8318:0:0:78:0] Put# [1:3:8319:0:0:55:0] Put# [1:3:8320:0:0:3:0] Put# [1:3:8321:0:0:43:0] Put# [1:3:8322:0:0:20:0] Put# [1:3:8323:0:0:49:0] Put# [1:3:8324:0:0:49:0] Put# [1:3:8325:0:0:9:0] Put# [1:3:8326:0:0:3:0] Put# [1:3:8327:0:0:73:0] Put# [1:3:8328:0:0:32:0] Put# [1:3:8329:0:0:43:0] Put# [1:3:8330:0:0:10:0] Put# [1:3:8331:0:0:73:0] Put# [1:3:8332:0:0:80:0] Put# [1:3:8333:0:0:11:0] Put# [1:3:8334:0:0:14:0] Put# [1:3:8335:0:0:73:0] Put# [1:3:8336:0:0:68:0] Put# [1:3:8337:0:0:96:0] Put# [1:3:8338:0:0:56:0] Put# [1:3:8339:0:0:28:0] Put# [1:3:8340:0:0:41:0] Put# [1:3:8341:0:0:92:0] Put# [1:3:8342:0:0:77:0] Put# [1:3:8343:0:0:72:0] Put# [1:3:8344:0:0:71:0] Put# [1:3:8345:0:0:76:0] Put# [1:3:8346:0:0:39:0] Put# [1:3:8347:0:0:59:0] Put# [1:3:8348:0:0:1:0] Put# [1:3:8349:0:0:41:0] Put# [1:3:8350:0:0:44:0] Put# [1:3:8351:0:0:77:0] Put# [1:3:8352:0:0:20:0] Put# [1:3:8353:0:0:91:0] Put# [1:3:8354:0:0:40:0] Put# [1:3:8355:0:0:90:0] Put# [1:3:8356:0:0:9:0] Put# [1:3:8357:0:0:53:0] Put# [1:3:8358:0:0:96:0] Put# [1:3:8359:0:0:40:0] Put# [1:3:8360:0:0:48:0] Put# [1:3:8361:0:0:66:0] Put# [1:3:8362:0:0:64:0] Put# [1:3:8363:0:0:1:0] Put# [1:3:8364:0:0:74:0] Put# [1:3:8365:0:0:59:0] Put# [1:3:8366:0:0:39:0] Put# [1:3:8367:0:0:76:0] 
Put# [1:3:8368:0:0:73:0] Put# [1:3:8369:0:0:57:0] Put# [1:3:8370:0:0:76:0] Put# [1:3:8371:0:0:46:0] Put# [1:3:8372:0:0:52:0] Put# [1:3:8373:0:0:39:0] Put# [1:3:8374:0:0:97:0] Put# [1:3:8375:0:0:79:0] Put# [1:3:8376:0:0:65:0] Put# [1:3:8377:0:0:83:0] Put# [1:3:8378:0:0:29:0] Put# [1:3:8379:0:0:48:0] Put# [1:3:8380:0:0:87:0] Put# [1:3:8381:0:0:51:0] Put# [1:3:8382:0:0:73:0] Put# [1:3:8383:0:0:86:0] Put# [1:3:8384:0:0:60:0] Put# [1:3:8385:0:0:80:0] Put# [1:3:8386:0:0:23:0] Put# [1:3:8387:0:0:36:0] Put# [1:3:8388:0:0:31:0] Put# [1:3:8389:0:0:14:0] Put# [1:3:8390:0:0:12:0] Put# [1:3:8391:0:0:37:0] Put# [1:3:8392:0:0:79:0] Put# [1:3:8393:0:0:67:0] Put# [1:3:8394:0:0:60:0] Put# [1:3:8395:0:0:36:0] Put# [1:3:8396:0:0:67:0] Put# [1:3:8397:0:0:42:0] Put# [1:3:8398:0:0:51:0] Put# [1:3:8399:0:0:65:0] Put# [1:3:8400:0:0:12:0] Put# [1:3:8401:0:0:53:0] Put# [1:3:8402:0:0:5:0] Put# [1:3:8403:0:0:87:0] Put# [1:3:8404:0:0:52:0] Put# [1:3:8405:0:0:96:0] Put# [1:3:8406:0:0:22:0] Put# [1:3:8407:0:0:8:0] Put# [1:3:8408:0:0:72:0] Put# [1:3:8409:0:0:67:0] Put# [1:3:8410:0:0:79:0] Put# [1:3:8411:0:0:52:0] Put# [1:3:8412:0:0:61:0] Put# [1:3:8413:0:0:65:0] Put# [1:3:8414:0:0:40:0] Put# [1:3:8415:0:0:42:0] Put# [1:3:8416:0:0:57:0] Put# [1:3:8417:0:0:30:0] Put# [1:3:8418:0:0:77:0] Put# [1:3:8419:0:0:56:0] Put# [1:3:8420:0:0:80:0] Put# [1:3:8421:0:0:11:0] Put# [1:3:8422:0:0:86:0] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TMLPConsumerTests::ReloadPQTablet [GOOD] >> TMLPConsumerTests::AlterConsumer |82.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |82.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.0%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test |82.0%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest |82.0%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> RangeOps::Intersection [GOOD] >> Discovery::DelayedNameserviceResponse [GOOD] >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse >> TestFilterSet::DuplicationValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 
: 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_range_ops/unittest >> TestFilterSet::CompilationValidation >> DataShardDiskQuotas::DiskQuotaExceeded [GOOD] >> DataShardDiskQuotas::ShardRestartOnCreateTable |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest |82.0%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> TestFilterSet::CompilationValidation [GOOD] >> TestFilterSet::Watermark >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> TTestYqlToMiniKQLCompile::CheckResolve >> TTxDataShardBuildFulltextDictScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> 
TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |82.0%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> TxKeys::ComparePointKeys >> TTestYqlToMiniKQLCompile::Extract [GOOD] |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/yt/kqp_yt_import/py3test >> TDescriberTests::TopicNotExists [GOOD] >> TDescriberTests::TopicNotTopic |82.1%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test |82.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |82.1%| [TS] {BAZEL_UPLOAD} ydb/core/client/minikql_compile/ut/unittest |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.1%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest |82.1%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> TestFilterSet::Watermark [GOOD] >> TestFilterSet::WatermarkWhere >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TMLPChangerTests::PartitionNotExists [GOOD] >> TMLPChangerTests::CommitTest >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> TMLPConsumerTests::AlterConsumer [GOOD] >> TMLPConsumerTests::RecreateConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/discovery/unittest >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] Test command err: Trying to start YDB, gRPC: 9609, MsgBus: 1455 
2026-01-10T11:58:21.099575Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701040921667121:2209];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:21.102290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:21.233288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:22.244257Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:22.676233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:22.699740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:22.756219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:22.756310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:22.841543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:22.843453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:22.860537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593701036626699653:2081] 1768046300980721 != 1768046300980724 2026-01-10T11:58:23.102139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:23.461603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:58:23.461624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:58:23.461630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:23.461729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:25.251075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:25.335860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:25.529761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:26.506312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701040921667121:2209];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:26.599446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:27.056875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:27.316476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:37.491770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:58:37.491808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:39.755758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701118231080278:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:39.764238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:39.764955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701118231080288:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:39.765639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:40.972497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.064604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.175318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.308345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.455955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.734867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.921387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:41.999740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:43.116849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701135410950394:2522], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:43.116918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:43.118331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701135410950399:2525], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:43.118357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701135410950400:2526], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:43.118864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:43.148283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:43.170693Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701135410950403:2527], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T11:58:43.266607Z node 1 :TX_PROXY ERROR ... [1:7593701135410950454:3891] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12196, MsgBus: 20669 2026-01-10T11:58:55.052719Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593701188999757397:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:55.052766Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:55.073365Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:55.321529Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:55.344157Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:55.345398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:55.345469Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:55.347675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:55.422081Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:58:55.422265Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:58:55.422274Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:55.422345Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:55.776291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:55.787942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:55.844993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T11:58:55.861321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:56.032812Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:56.104335Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:56.137409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:59.817703Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593701206179628421:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:59.817794Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:59.817971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593701206179628430:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:59.818004Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:59.936664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:59.965839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:59.995136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.026092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.054184Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593701188999757397:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:00.054245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:59:00.054593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.129464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.161794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.211867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:00.273585Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593701210474596603:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:00.273670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:00.273756Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593701210474596608:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:00.273801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593701210474596609:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:00.273838Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:00.276582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:00.285751Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593701210474596612:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T11:59:00.382215Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593701210474596664:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/discovery/unittest >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast >> TestFilterSet::WatermarkWhere [GOOD] >> TestFilterSet::WatermarkWhereFalse >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> SequenceProxy::Basics >> DataShardDiskQuotas::ShardRestartOnCreateTable [GOOD] >> DataShardDiskQuotas::ShardRestartOnSplitDst >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> TFetchRequestTests::CheckAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/fetcher/ut/unittest >> TFetchRequestTests::CheckAccess [GOOD] Test command err: 2026-01-10T11:56:54.535253Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700669660271959:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:56:54.554702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:56:54.670313Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:56:54.955056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:56:54.968440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:56:54.968498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:56:54.971551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:56:55.044238Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:56:55.044512Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593700669660271913:2081] 1768046214491930 != 1768046214491933 2026-01-10T11:56:55.109221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002b80/r3tmp/yandexdiU14q.tmp 2026-01-10T11:56:55.109241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002b80/r3tmp/yandexdiU14q.tmp 2026-01-10T11:56:55.109356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002b80/r3tmp/yandexdiU14q.tmp 2026-01-10T11:56:55.109407Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:56:55.142719Z INFO: 
TTestServer started on Port 20771 GrpcPort 4489 2026-01-10T11:56:55.177313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:4489 2026-01-10T11:56:55.345779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:56:55.466186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:56:55.643755Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:56:56.040664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T11:56:59.524285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593700669660271959:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:56:59.524587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:57:01.773412Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekw8gys9tkkpkbhfrqsjqwz", Request deadline has expired for 0.661044s seconds 2026-01-10T11:57:01.787158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700699725044033:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.842376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.846457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700699725044062:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.872215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700699725044091:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.872504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.872769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700699725044093:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.872820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:01.874098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:57:01.888286Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593700699725044064:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T11:57:01.957583Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700699725044119:2656] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:57:03.446899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:03.457454Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593700699725044132:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:57:03.459479Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDcxOWExNWItMjMwMDYwNzYtNmVkZWQ4NTgtYWM2YzJlZGE=, ActorId: [1:7593700699725044030:2334], ActorState: ExecuteState, LegacyTraceId: 01kekw8pde7tegke167vkm85x2, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:57:03.480531Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T11:57:03.723882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:04.142507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593700716904913653:2861] 2026-01-10T11:57:09.892052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:57:09.892099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded === CheckClustersList. Ok 2026-01-10T11:57:12.243709Z :HappyWay INFO: TTopicSdkTestSetup started 2026-01-10T11:57:12.267055Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T11:57:12.284686Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593700746969684933:2962] connected; active server actors: 1 2026-01-10T11:57:12.285184Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. 
Deleted partitions []. Added partitions [0] 2026-01-10T11:57:12.292645Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [ ... : partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.314206Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314215Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-10T11:59:12.314242Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-10T11:59:12.314253Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314262Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.314272Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314281Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2026-01-10T11:59:12.314301Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-10T11:59:12.314309Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314319Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.314329Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314337Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-10T11:59:12.314358Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T11:59:12.314365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314373Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.314382Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.314390Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-10T11:59:12.416476Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416512Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416528Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416548Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-10T11:59:12.416562Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-10T11:59:12.416602Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416620Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416630Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416639Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:12.416660Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416667Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416674Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416684Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416691Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-10T11:59:12.416717Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416726Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416734Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416742Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2026-01-10T11:59:12.416769Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416778Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416785Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416794Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416801Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-10T11:59:12.416824Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T11:59:12.416833Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 
0 2026-01-10T11:59:12.416839Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.416851Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.416859Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-10T11:59:12.520479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-10T11:59:12.520508Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.520524Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.520543Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.520556Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-10T11:59:12.521089Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:12.521102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521110Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.521121Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:12.521160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:12.521168Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521175Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.521185Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521192Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-10T11:59:12.521211Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-10T11:59:12.521220Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521227Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.521236Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521243Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 
2026-01-10T11:59:12.521261Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-10T11:59:12.521270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521277Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.521286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521293Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-10T11:59:12.521314Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T11:59:12.521323Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521329Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:12.521338Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:12.521344Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/fetcher/ut/unittest >> TestFilterSet::WatermarkWhereFalse [GOOD] >> TestFormatHandler::ManyJsonClients >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::ReconnectsWithKesusAfterSeveralRetries [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> 
KeyValueGRPCService::TestSimpleExecuteTransactionV2 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.1%| [TM] {RESULT} ydb/core/kqp/ut/discovery/unittest |82.1%| [TM] {RESULT} ydb/core/persqueue/public/fetcher/ut/unittest >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast |82.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/describer/ut/unittest >> TDescriberTests::TopicNotTopic 2026-01-10 11:59:11,252 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 11:59:11,563 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. Process tree before termination: pid rss ref pdirt 16733 58.7M 58.7M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/0030a1/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args 16890 1.4G 1.4G 905M └─ ydb-core-persqueue-public-describer-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/0030a1/ydb/core/persqueue/public/describer/ut/test-results/unittest/ytes Test command err: 2026-01-10T11:58:13.211691Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701008425223690:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:13.211749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:13.237353Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:58:13.501035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:13.516059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:13.516166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:13.602766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:13.619555Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593701008425223551:2081] 1768046293201720 != 1768046293201723 2026-01-10T11:58:13.634752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:13.691577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:14.039733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexswCAXO.tmp 2026-01-10T11:58:14.039757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexswCAXO.tmp 2026-01-10T11:58:14.041715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexswCAXO.tmp 2026-01-10T11:58:14.042263Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:14.261975Z INFO: TTestServer started on Port 3579 GrpcPort 23200 2026-01-10T11:58:14.452119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:23200 2026-01-10T11:58:18.047338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:18.212742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701008425223690:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:18.213158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:18.892044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:20.876729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T11:58:20.900671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-10T11:58:25.614055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701059964832187:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.614181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.614352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701059964832200:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.614547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701059964832202:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.614578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.619370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:25.644166Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701059964832204:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T11:58:25.705918Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593701059964832268:2674] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:58:26.589714Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593701059964832276:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:58:26.636511Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzgxYWVkNjYtMjA4MTM5Ni1mOTRmYzE0ZC1mOTM3M2Y5Zg==, ActorId: [1:7593701059964832185:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwb89y9hmbd0yfdeq9rr8d, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:58:26.699715Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T11:58:26.720317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:26.876364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:27.246327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T11:58:28.442105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:58:28.442125Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded === CheckClustersList. Subcribe to ClusterTracker from [1:7593701072849734475:2866] === CheckClustersList. 
Ok 2026-01-10T11:58:34.704525Z :TopicExists INFO: TTopicSdkTestSetup started 2026-01-10T11:58:35.117941Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T11:58:35.263215Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593701102914505718:2953] connected; active server actors: 1 2026-01-10T11:58:35.264115Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T11:58:35.271540Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T11:58:35.272005Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T11:58:35.311325Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T11:58:35.321303Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T11:58:35. ... 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T11:59:01.141534Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:01.141580Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:01.141593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:01.141614Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:01.141625Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:01.780361Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593701215061012604:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:01.794338Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:59:01.807453Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:59:01.812649Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:59:01.955731Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:01.957380Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:01.957452Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:02.006480Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:02.018920Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:59:02.302891Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexCq0Krp.tmp 2026-01-10T11:59:02.302916Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexCq0Krp.tmp 2026-01-10T11:59:02.303913Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0030a1/r3tmp/yandexCq0Krp.tmp 2026-01-10T11:59:02.304324Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:59:02.808795Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:02.835625Z INFO: TTestServer started on Port 20843 GrpcPort 7424 PQClient connected to localhost:7424 2026-01-10T11:59:05.510988Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:05.547499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T11:59:05.605981Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T11:59:05.636941Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:06.122125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T11:59:06.780346Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593701215061012604:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:06.780397Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:59:10.388435Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593701253715719288:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:10.388887Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:10.392873Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593701253715719300:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:10.414378Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:10.445047Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593701253715719302:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T11:59:10.541539Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593701253715719375:2657] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:59:10.665380Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593701253715719383:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:59:10.677111Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ODM3NTFjZjgtZWUxMDc4ZjEtYWQ5NDg2MDMtNzcyMjc1YTk=, ActorId: [3:7593701253715719260:2336], ActorState: ExecuteState, LegacyTraceId: 01kekwcm0jbwmq2bxy35jv2s2z, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:59:10.679344Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T11:59:10.820340Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:10.876629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:11.444943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/0030a1/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call 
last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/0030a1/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |82.1%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/public/describer/ut/unittest |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> TxKeys::ComparePointAndRangeWithInf [GOOD] |82.1%| [TS] {RESULT} ydb/core/persqueue/public/describer/ut/unittest |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |82.1%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> TestFormatHandler::ManyJsonClients [GOOD] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2026-01-10T11:59:01.676781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T11:59:01.677749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T11:59:01.677792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:01.688337Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T11:59:01.688694Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T11:59:01.688962Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:59:01.735614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:59:01.743744Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:59:01.744346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:59:01.745847Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T11:59:01.745923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T11:59:01.745970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T11:59:01.746353Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:59:01.746424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:59:01.746493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T11:59:01.841072Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:59:01.872041Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T11:59:01.872258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:59:01.872368Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T11:59:01.872418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T11:59:01.872475Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T11:59:01.872508Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:01.872729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:01.872781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:01.873084Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T11:59:01.873173Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T11:59:01.873219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T11:59:01.873253Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:59:01.873287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T11:59:01.873319Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T11:59:01.873364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T11:59:01.873397Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T11:59:01.873434Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T11:59:01.873522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:01.873565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:01.873613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T11:59:01.879918Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T11:59:01.879994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T11:59:01.880103Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T11:59:01.880259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T11:59:01.880325Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T11:59:01.880380Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T11:59:01.880439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T11:59:01.880473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T11:59:01.880508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T11:59:01.880536Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T11:59:01.880863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T11:59:01.880905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T11:59:01.880942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T11:59:01.880970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T11:59:01.881033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T11:59:01.881057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T11:59:01.881082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T11:59:01.881110Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T11:59:01.881137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T11:59:01.896773Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 
9437184 2026-01-10T11:59:01.896847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T11:59:01.896882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T11:59:01.896939Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T11:59:01.897008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T11:59:01.897550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:01.897613Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:01.897653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T11:59:01.897723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T11:59:01.897750Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T11:59:01.897926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T11:59:01.898019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T11:59:01.898060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T11:59:01.898097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T11:59:01.902044Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T11:59:01.902118Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:01.902354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:01.902391Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:01.902449Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T11:59:01.902491Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T11:59:01.902525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T11:59:01.902567Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T11:59:01.902606Z node 1 :TX_DATASHARD TRACE: dat ... 
ode 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T11:59:19.174733Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:136:2157]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-10T11:59:19.174776Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T11:59:19.174810Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-10T11:59:19.174850Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:19.177165Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2026-01-10T11:59:19.177231Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2026-01-10T11:59:19.177280Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T11:59:19.177660Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T11:59:19.177689Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2026-01-10T11:59:19.177716Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T11:59:19.177751Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-10T11:59:19.177774Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2026-01-10T11:59:19.177826Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T11:59:19.177863Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-10T11:59:19.177931Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:19.178478Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 136 RawX2: 21474838637 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2026-01-10T11:59:19.178579Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:233:2229], Recipient [5:136:2157]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:235:2230] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T11:59:19.178607Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T11:59:19.178672Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:128:2152], Recipient [5:136:2157]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-10T11:59:19.178699Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-10T11:59:19.178734Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state 
Ready 2026-01-10T11:59:19.178793Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-10T11:59:19.179135Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [5:104:2137], Recipient [5:136:2157]: NActors::TEvents::TEvPoison 2026-01-10T11:59:19.179588Z node 5 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 9437184 2026-01-10T11:59:19.179686Z node 5 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 9437184 2026-01-10T11:59:19.187646Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvBoot 2026-01-10T11:59:19.192928Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvRestored 2026-01-10T11:59:19.193020Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:59:19.195982Z node 5 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:240:2232] 2026-01-10T11:59:19.196219Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:59:19.199849Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:704: TxInitSchema.Execute Persist Sys_SubDomainInfo 2026-01-10T11:59:19.226188Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:59:19.226316Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:59:19.228124Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T11:59:19.228204Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T11:59:19.228259Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T11:59:19.228599Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:59:19.228742Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:59:19.228787Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [5:283:2232] in generation 3 2026-01-10T11:59:19.241678Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:59:19.241787Z node 5 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 9437184 2026-01-10T11:59:19.241866Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-10T11:59:19.241983Z node 5 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2026-01-10T11:59:19.242175Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [5:288:2271] 2026-01-10T11:59:19.242213Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T11:59:19.242256Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-10T11:59:19.242300Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:19.242520Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-10T11:59:19.242628Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-10T11:59:19.242813Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [5:240:2232], Recipient [5:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:19.242863Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:59:19.243109Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T11:59:19.243190Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T11:59:19.243314Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 240 RawX2: 21474838712 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-10T11:59:19.243396Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:240:2232]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-10T11:59:19.243430Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T11:59:19.243470Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-10T11:59:19.243507Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T11:59:19.243588Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [5:26:2073], Recipient [5:240:2232]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-10T11:59:19.243620Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-10T11:59:19.243660Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-10T11:59:19.243749Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T11:59:19.243790Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:59:19.243825Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T11:59:19.243861Z node 5 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T11:59:19.243897Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T11:59:19.243928Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T11:59:19.243966Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T11:59:19.244052Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:286:2269], Recipient [5:240:2232]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:290:2273] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T11:59:19.244111Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T11:59:19.244185Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:128:2152], Recipient [5:240:2232]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-10T11:59:19.244213Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-10T11:59:19.244268Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-10T11:59:19.244321Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-10T11:59:19.260832Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [5:286:2269], Recipient [5:240:2232]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:286:2269] ServerId: [5:290:2273] } 2026-01-10T11:59:19.260888Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed >> TestFormatHandler::ManyRawClients |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_keys/unittest |82.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> SequenceProxy::DropRecreate [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] >> TListAllTopicsTests::ListLimitAndPaging [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:57:42.416997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:42.514195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:57:42.523064Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:42.523410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:42.523481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:42.952845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:42.952998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:42.959994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046248133755 != 1768046248133758 2026-01-10T11:57:42.982504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:43.033397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:43.282156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:44.132954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:44.199047Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:44.438009Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T11:57:44.438704Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T11:57:44.439831Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T11:57:45.710549Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T11:57:45.712044Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T11:57:45.717337Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2026-01-10T11:57:45.718103Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T11:57:45.719947Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T11:57:45.720677Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T11:57:45.721386Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T11:57:45.747586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:45.751712Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T11:57:45.762339Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T11:57:45.763350Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T11:57:46.070363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T11:57:46.084481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T11:57:46.086010Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T11:57:46.097164Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:57:46.725303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:57:46.728614Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:57:46.729341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:57:46.764127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T11:57:46.764871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T11:57:46.768934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T11:57:46.773379Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:57:46.773844Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:57:46.775143Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T11:57:46.790849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:57:47.087348Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T11:57:47.088129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:57:47.089253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T11:57:47.089293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:57:47.089322Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T11:57:47.089362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:47.091314Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:57:47.091713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:57:47.093007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T11:57:47.093771Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T11:57:47.094181Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:57:47.094512Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:57:47.095191Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T11:57:47.095524Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T11:57:47.095882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T11:57:47.096543Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T11:57:47.096887Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:57:47.097944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:57:47.099027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T11:57:47.099722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T11:57:47.104204Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T11:57:47.104971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T11:57:47.105935Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T11:57:47.112477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T11:57:47.112841Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T11: ... :21.194259Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-10T11:59:21.194713Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T11:59:21.194754Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T11:59:21.194786Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T11:59:21.194820Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-10T11:59:21.196107Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T11:59:21.199967Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T11:59:21.200848Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-10T11:59:21.201274Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1068:2888], 0} after executionsCount# 1 2026-01-10T11:59:21.201687Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1068:2888], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T11:59:21.202726Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1068:2888], 0} finished in read 2026-01-10T11:59:21.203622Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T11:59:21.203655Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T11:59:21.203682Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T11:59:21.203711Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 
72075186224037888 on unit CompletedOperations 2026-01-10T11:59:21.204127Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T11:59:21.204155Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T11:59:21.204184Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T11:59:21.204646Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T11:59:21.206021Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T11:59:21.207059Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [8:70:2117], Recipient [8:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710660 LockNode: 8 Status: STATUS_SUBSCRIBED 2026-01-10T11:59:21.215874Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1068:2888], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T11:59:21.215948Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2026-01-10T11:59:21.230051Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [8:1072:2893], Recipient [8:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:21.230875Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T11:59:21.231895Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [8:1071:2892], serverId# [8:1072:2893], sessionId# [0:0:0] 2026-01-10T11:59:21.233353Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549570, Sender [8:1070:2891], Recipient [8:886:2765]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2026-01-10T11:59:21.234322Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976710657 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2026-01-10T11:59:21.235196Z node 8 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976710660 marked broken at v{min} 2026-01-10T11:59:21.260358Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-10T11:59:21.267173Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270977, Sender [8:26:2073], Recipient [8:886:2765]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2501} 2026-01-10T11:59:21.267240Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2026-01-10T11:59:21.267664Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 
2501 at tablet 72075186224037888 2026-01-10T11:59:21.273624Z node 8 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:59:21.736297Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [8:1094:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-10T11:59:21.737634Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T11:59:21.738441Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-10T11:59:21.739238Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T11:59:21.740875Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-10T11:59:21.741265Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T11:59:21.741651Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T11:59:21.742121Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-10T11:59:21.742539Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T11:59:21.742565Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T11:59:21.742970Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T11:59:21.743487Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-10T11:59:21.743993Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T11:59:21.748266Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T11:59:21.749450Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-10T11:59:21.750503Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1094:2910], 0} after executionsCount# 1 2026-01-10T11:59:21.751277Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# 
{[8:1094:2910], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T11:59:21.751719Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1094:2910], 0} finished in read 2026-01-10T11:59:21.752932Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T11:59:21.753381Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T11:59:21.753793Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T11:59:21.754204Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-10T11:59:21.754248Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T11:59:21.754637Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T11:59:21.754669Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-10T11:59:21.755099Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T11:59:21.756342Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T11:59:21.764517Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1094:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T11:59:21.765336Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2026-01-10T11:59:17.125393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T11:59:17.125441Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:17.343568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:17.881907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-10T11:59:18.123448Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T11:59:18.123827Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization 
error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpfbeDKw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T11:59:18.124401Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpfbeDKw/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpfbeDKw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14020713399050335590 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T11:59:19.195671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T11:59:19.195742Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:19.286263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:19.841029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-10T11:59:20.060554Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T11:59:20.061016Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpLawyob/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-01-10T11:59:20.061235Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpLawyob/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/00353f/r3tmp/tmpLawyob/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6315558798334775457 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T11:59:20.354464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:353) 2026-01-10T11:59:20.821065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) |82.1%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceproxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_replication/unittest >> TabletService_ChangeSchema::Basics >> TestFormatHandler::ManyRawClients [GOOD] |82.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |82.1%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TestFormatHandler::ClientValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/list_topics/ut/unittest >> TListAllTopicsTests::ListLimitAndPaging [GOOD] Test command err: 2026-01-10T11:58:13.377439Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701010308993913:2138];send_to=[0:7307199536658146131:7762515]; 
2026-01-10T11:58:13.377487Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:13.447286Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:58:13.664477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:13.664607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:13.673822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:13.810683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:13.821807Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593701010308993813:2081] 1768046293371233 != 1768046293371236 2026-01-10T11:58:13.822151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:14.332112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:14.398867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:14.678908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0016f7/r3tmp/yandexKaojqQ.tmp 2026-01-10T11:58:14.678932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0016f7/r3tmp/yandexKaojqQ.tmp 2026-01-10T11:58:14.679811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0016f7/r3tmp/yandexKaojqQ.tmp 2026-01-10T11:58:14.680752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:15.112392Z INFO: TTestServer started on Port 6964 GrpcPort 4470 PQClient connected to localhost:4470 2026-01-10T11:58:18.385242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701010308993913:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:18.385305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:18.954405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:19.078710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710658, at schemeshard: 72057594046644480 2026-01-10T11:58:19.645186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:20.951736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T11:58:25.853958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701061848602464:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.858512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.860648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701061848602477:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.860683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701061848602478:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.861093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:25.877023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:25.933772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701061848602481:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T11:58:26.028251Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593701066143569844:2689] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:58:27.228555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:27.249949Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593701066143569852:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:58:27.254250Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWExNjlhNTMtN2Q1YzcwMjktZThjZmViNDgtYjMxYTY3ZjI=, ActorId: [1:7593701061848602453:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwb8ek5n225hsgc3ryf7cb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:58:27.284584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:27.289820Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T11:58:28.254312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:28.650369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:58:28.651131Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593701091913373975:2897] === CheckClustersList. Ok 2026-01-10T11:58:39.263409Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-01-10T11:58:39.279683Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-01-10T11:58:39.680475Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T11:58:39.722062Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T11:58:39.723584Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-10T11:58:39.723956Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T11:58:39.726848Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-10T11:58:39.744326Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T11:58:39.744350Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7593701121978145222:2993], now have 1 active actors on pipe 2026-01-10T11:58:39.765185Z node 1 :PERSQUEUE ... nd: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2026-01-10T11:59:23.348854Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T11:59:23.348934Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T11:59:23.351212Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T11:59:23.360190Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T11:59:23.360910Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T11:59:23.360959Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037895][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2026-01-10T11:59:23.360993Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037897][topic3] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037897 2026-01-10T11:59:23.408533Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.408547Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.408557Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.408570Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.408581Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.438228Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.438255Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.438267Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.438284Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.438296Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.441319Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.441340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.441350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.441364Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.441375Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.509659Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.509691Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.509705Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.509728Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.509741Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.540621Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.540646Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.540656Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.540671Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.540681Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.541663Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.541672Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.541679Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.541689Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.541696Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.612454Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.612481Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.612491Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.612507Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.612517Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.644611Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.644636Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.644648Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.644665Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.644676Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.644711Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.644718Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.644723Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.644731Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.644738Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.713974Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.714008Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.714019Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.714035Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.714046Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.745967Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.746001Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.746015Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.746037Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.746052Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.746101Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.746110Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.746117Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.746126Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.746146Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T11:59:23.814175Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T11:59:23.814213Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.814231Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T11:59:23.814253Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T11:59:23.814267Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/list_topics/ut/unittest >> TMLPChangerTests::CommitTest [GOOD] >> TMLPChangerTests::ReadAndReleaseTest >> HttpRouter::Basic >> HttpRouter::Basic [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] |82.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest |82.2%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest |82.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |82.2%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |82.2%| [TM] {RESULT} ydb/core/persqueue/public/list_topics/ut/unittest |82.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |82.2%| [TS] {BAZEL_UPLOAD} ydb/core/public_http/ut/unittest |82.2%| [TS] {RESULT} ydb/core/public_http/ut/unittest >> 
TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextIndexScan::BadRequest >> TestFormatHandler::ClientValidation [GOOD] >> DescribeSchemaSecretsService::GetNewValue >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> TestFormatHandler::ClientError >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_disk_quotas/unittest >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:58:40.361797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:40.930841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:58:40.940322Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:58:40.940792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:40.940899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:58:43.292720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:43.293602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:43.348562Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046304705968 != 1768046304705971 2026-01-10T11:58:43.428235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:43.530211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:43.797870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... Setting hard disk quota to 1 byte 2026-01-10T11:58:44.930333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:58:44.931482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T11:58:44.931852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T11:58:44.933444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:830:2725], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T11:58:44.934554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T11:58:45.696685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } DatabaseQuotas { data_size_hard_quota: 1 } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T11:58:45.698672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T11:58:45.701343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T11:58:45.702565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 
281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T11:58:45.705602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T11:58:45.706865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:45.708149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T11:58:45.713648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T11:58:45.715765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T11:58:45.716476Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T11:58:45.716775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-10T11:58:45.718998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.719341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.719969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T11:58:45.720746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T11:58:45.721904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-10T11:58:45.722658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-10T11:58:45.723882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T11:58:45.730451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T11:58:45.730880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T11:58:45.730912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-10T11:58:45.732743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T11:58:45.733179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-10T11:58:45.733671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.734096Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.734510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T11:58:45.734881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T11:58:45.735257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-10T11:58:45.736412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T11:58:45.739184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T11:58:45.739211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-10T11:58:45.739297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.739323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T11:58:45.739760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T11:58:45.739792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T11:58:45.740951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T11:58:45.741751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T11:58:45.742194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-10T11:58:45.778685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T11:58:45.781456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2026-01-10T11:58:45.781894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-10T11:58:45.783739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T11:58:45.786463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 281474976715657 2026-01-10T11:58:45.790153Z node 1 :FLAT_TX_SC ... hard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [3:399:2398], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-10T11:59:27.603647Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-10T11:59:27.603715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T11:59:27.603763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-10T11:59:27.603845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2026-01-10T11:59:27.604332Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [3:915:2777], Recipient [3:924:2783]: NKikimr::TEvTablet::TEvTabletDead 2026-01-10T11:59:27.604697Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T11:59:27.604823Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T11:59:27.606364Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [3:1243:3044], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T11:59:27.606399Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T11:59:27.606423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-10T11:59:27.607105Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:921:2781], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [3:921:2781] ServerId: [3:926:2784] } 2026-01-10T11:59:27.607155Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T11:59:27.607196Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2026-01-10T11:59:27.607751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T11:59:27.607820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 
72057594046644480:1 tabletId 72075186224037888 2026-01-10T11:59:27.608023Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:1535:3269], Recipient [3:399:2398]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:1535:3269] ServerId: [3:1536:3270] } 2026-01-10T11:59:27.608054Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T11:59:27.608127Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-01-10T11:59:27.608395Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T11:59:27.608492Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T11:59:27.933376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T11:59:27.933464Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T11:59:27.933569Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:399:2398], Recipient [3:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T11:59:27.933607Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
Inserting the 4th row 2026-01-10T11:59:28.517207Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [3:1567:3284], Recipient [3:1390:3156]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-10T11:59:28.517688Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037890 2026-01-10T11:59:28.518211Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [3:1390:3156], Recipient [3:1390:3156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T11:59:28.518242Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T11:59:28.519096Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037890 2026-01-10T11:59:28.519989Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037890, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-10T11:59:28.521321Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table2, shard: 72075186224037890, write point (Uint32 : 4) 2026-01-10T11:59:28.521744Z node 3 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:39:1] 2026-01-10T11:59:28.522205Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CheckWrite 2026-01-10T11:59:28.523025Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-10T11:59:28.523481Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CheckWrite 2026-01-10T11:59:28.523956Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-10T11:59:28.524385Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-10T11:59:28.524817Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976710659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T11:59:28.525279Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037890 2026-01-10T11:59:28.525701Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-10T11:59:28.525726Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-10T11:59:28.525748Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BlockFailPoint 2026-01-10T11:59:28.525768Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BlockFailPoint 2026-01-10T11:59:28.525788Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-10T11:59:28.525806Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BlockFailPoint 2026-01-10T11:59:28.525824Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit ExecuteWrite 2026-01-10T11:59:28.525842Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit ExecuteWrite 2026-01-10T11:59:28.525882Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037890 2026-01-10T11:59:28.526308Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976710659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T11:59:28.527131Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037890, row count=1 2026-01-10T11:59:28.528001Z node 3 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T11:59:28.528435Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is ExecutedNoMoreRestarts 2026-01-10T11:59:28.528846Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit ExecuteWrite 2026-01-10T11:59:28.529309Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit FinishProposeWrite 2026-01-10T11:59:28.529741Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-10T11:59:28.529819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is DelayCompleteNoMoreRestarts 2026-01-10T11:59:28.530228Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit FinishProposeWrite 2026-01-10T11:59:28.530673Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T11:59:28.531047Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CompletedOperations 2026-01-10T11:59:28.531086Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-10T11:59:28.531106Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T11:59:28.531132Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037890 has finished 2026-01-10T11:59:28.546832Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037890 2026-01-10T11:59:28.547266Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-10T11:59:28.547699Z node 3 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction 
complete txid 2 at tablet 72075186224037890 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T11:59:28.552747Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_disk_quotas/unittest >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> TestFormatHandler::ClientError [GOOD] >> TTxDataShardBuildFulltextIndexScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextIndexScan::Build |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_disk_quotas/unittest |82.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |82.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> TestFormatHandler::ClientErrorWithEmptyFilter >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 >> NodeWardenDsProxyConfigRetrieval::Disconnect >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> TestFormatHandler::Watermark >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TTxDataShardBuildFulltextIndexScan::Build [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithData >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> TestFormatHandler::Watermark [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2026-01-10T11:59:42.919750Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T11:59:43.156085Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial 
ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskGuid: 5263180497727559684 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 5263180497727559684 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 5263180497727559684 } } } } AvailabilityDomains: 0 } 2026-01-10T11:59:43.158447Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T11:59:43.166990Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T11:59:43.176526Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 5263180497727559684 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T11:59:43.196852Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 5263180497727559684 2026-01-10T11:59:43.197472Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T11:59:43.209076Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2076] ControllerId# 72057594037932033 2026-01-10T11:59:43.209517Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T11:59:43.211591Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T11:59:43.213398Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T11:59:43.611098Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T11:59:43.618297Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T11:59:44.210654Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T11:59:44.212591Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T11:59:44.212648Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T11:59:44.213563Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T11:59:44.227112Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-10T11:59:44.227174Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-10T11:59:44.227234Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-10T11:59:44.234677Z node 1 :BS_NODE 
DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "&\347iy[\265\034\337\014\346\023 \375/7\\\036r\026C" } 2026-01-10T11:59:44.235290Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-10T11:59:44.235347Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-10T11:59:44.242723Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskGuid: 5263180497727559684 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 5263180497727559684 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 5263180497727559684 } } } } AvailabilityDomains: 0 } 2026-01-10T11:59:44.242903Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T11:59:44.244119Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-10T11:59:44.254556Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-10T11:59:44.255605Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-10T11:59:44.275218Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T11:59:44.280117Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T11:59:44.280551Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T11:59:44.281151Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:59:44.282449Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T11:59:44.282689Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T11:59:44.282718Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T11:59:44.282908Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T11:59:44.296239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T11:59:44.296710Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T11:59:44.296868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T11:59:44.297166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T11:59:44.297268Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T11:59:44.297388Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T11:59:44.339210Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T11:59:44.339804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T11:59:44.369451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T11:59:44.369567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T11:59:44.370023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T11:59:44.370499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T11:59:44.370761Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T11:59:44.371194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T11:59:44.371437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T11:59:44.372015Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T11:59:44.390125Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T11:59:44.390424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T11:59:44.411953Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T11:59:44.412307Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T11:59:44.420413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T11:59:44.420464Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T11:59:44.465154Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T11:59:44.465231Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T11:59:44.465590Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:92:2123] SessionId# 
[0:0:0] Cookie# 0 Pipe connected clientId# [1:29:2076] 2026-01-10T11:59:44.465976Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2026-01-10T11:59:44.470132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T11:59:44.472250Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72 ... okie# 0 === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:29:2076] 2026-01-10T11:59:44.515516Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2026-01-10T11:59:44.516011Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2160] ControllerId# 72057594037932033 2026-01-10T11:59:44.523469Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T11:59:44.524857Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:143} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2026-01-10T11:59:44.525085Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 2147483648 HasGroupInfo# false GroupInfoGeneration# 2026-01-10T11:59:44.525334Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:285} RequestGroupConfig GroupId# 2147483648 2026-01-10T11:59:44.529105Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 2026-01-10T11:59:44.529417Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-10T11:59:44.530339Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-10T11:59:44.556360Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 Pipe connected clientId# [1:139:2160] 2026-01-10T11:59:44.558800Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2160] ServerId# [1:150:2169] TabletId# 72057594037932033 PipeClientId# [1:139:2160] 2026-01-10T11:59:44.560338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 5263180497727559684 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2026-01-10T11:59:44.560718Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2026-01-10T11:59:44.562014Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} 
Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2026-01-10T11:59:44.563210Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:150:2169] RecipientRewrite# [1:92:2123] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2026-01-10T11:59:44.563924Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2026-01-10T11:59:44.565230Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2026-01-10T11:59:44.566021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2026-01-10T11:59:44.599613Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskGuid: 5263180497727559684 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "9af399b2-9364b527-bdaa6894-e0f26a07" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2026-01-10T11:59:44.601011Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskGuid: 5263180497727559684 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "9af399b2-9364b527-bdaa6894-e0f26a07" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2026-01-10T11:59:44.601377Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 
1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/d9xi/001924/r3tmp/tmp72WJYR/static.dat" PDiskGuid: 5263180497727559684 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-10T11:59:44.602472Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T11:59:44.602853Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T11:59:44.603262Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 5263180497727559684 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T11:59:44.615294Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 5263180497727559684 2026-01-10T11:59:44.637499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2026-01-10T11:59:44.646858Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-10T11:59:44.647210Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-10T11:59:44.647345Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 5263180497727559684 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-10T11:59:44.647517Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 
2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T11:59:44.647602Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T11:59:44.649189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 5263180497727559684 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2026-01-10T11:59:44.651693Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2026-01-10T11:59:44.675937Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2026-01-10T11:59:44.701169Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-10T11:59:44.702278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 5263180497727559684 Status: REPLICATING OnlyPhantomsRemain: false } } 2026-01-10T11:59:44.707666Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-10T11:59:44.708828Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 5263180497727559684 Status: READY OnlyPhantomsRemain: false } } 2026-01-10T11:59:45.344095Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34189869056 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17041457152 State: Normal SlotCount: 2 SlotSizeInUnits: 0 PDiskUsage: 0.10449320794148381 } } >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> TestFormatHandler::WatermarkWhere >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropSameViewTwice >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> TestFormatHandler::WatermarkWhere [GOOD] >> TMLPChangerTests::ReadAndReleaseTest [GOOD] >> TMLPChangerTests::CapacityTest [GOOD] >> TMLPReaderTests::TopicNotExists >> ProtoTests::CreateQueueFiller [GOOD] >> ProtoTests::UpdateQueueFiller [GOOD] >> ProtoTests::DeleteQueueFiller [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> 
TestFormatHandler::WatermarkWhereFalse >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupMultiMatches [GOOD] >> GenericProviderLookupActor::LookupWithErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:59:13.399241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:14.323426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:59:14.395515Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:59:14.397404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:14.398213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:59:15.777452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:15.779722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:15.830553Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046341377221 != 1768046341377224 2026-01-10T11:59:15.940215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:16.071152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:16.242492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:59:16.954437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:17.007012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:17.144030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:17.459918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:17.460050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:17.460155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:17.461039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:17.461366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:17.474965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:17.629993Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:59:17.701090Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } ... waiting for SysViewsRoster update finished 2026-01-10T11:59:23.854782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:23.864920Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:59:23.865278Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:23.865333Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:59:24.649249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:24.649344Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:24.808640Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046359053769 != 1768046359053773 2026-01-10T11:59:24.842086Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:24.950973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:25.107888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:59:25.917449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:25.940812Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:26.135523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:27.296401Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1031:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:27.296507Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:27.296583Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:27.297459Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1047:2879], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:27.297783Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:27.301627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:27.486797Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1045:2877], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:59:27.526681Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1104:2917] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_ ... IVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:38.729173Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:59:39.077261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:39.100312Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:39.292457Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:39.703390Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:39.703480Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:39.703545Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:39.704374Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:39.704515Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:39.708456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:39.879522Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T11:59:39.932629Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1055:2879] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } ... waiting for SysViewsRoster update finished 2026-01-10T11:59:48.077930Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:48.113582Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:59:48.115268Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:48.115677Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:59:49.263288Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:49.265143Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:49.330177Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1768046384261122 != 1768046384261125 2026-01-10T11:59:49.352224Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:49.439175Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:49.557992Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:59:50.419024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:50.463004Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:50.715235Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:51.596719Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:51.596840Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:51.597875Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:51.598560Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:51.598704Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:51.603161Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:51.978024Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:997:2840], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T11:59:52.062383Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:59:53.554430Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1065:2888], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2026-01-10T11:59:53.583508Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=MTJmZmJkNzktNGJiMjc0MTEtNTA1MDMyMjAtMTY3ZDg5YTY=, ActorId: [4:980:2829], ActorState: ExecuteState, LegacyTraceId: 01kekwdw8tat45rmf891r2c3fk, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } } tx_id# trace_id# 2026-01-10T11:59:54.585463Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1087:2904], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2026-01-10T11:59:54.640221Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YWM4NWUxNjQtMmIyNmMxZTgtOTk1OTkwOTItNDE0MTk3ZDE=, ActorId: [4:1079:2896], ActorState: ExecuteState, LegacyTraceId: 01kekwdya70zvp724r62pwy5xv, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } } tx_id# trace_id# >> GenericProviderLookupActor::LookupWithErrors [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV2 |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_sequence/unittest >> DescribeSchemaSecretsService::GetNewValue [GOOD] >> DescribeSchemaSecretsService::GetUpdatedValue >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> ProtoTests::DeleteQueueFiller [GOOD] Test command err: 2026-01-10T11:59:27.277515Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701327948977096:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:27.277744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:59:28.275155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:29.090668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:59:29.766318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:29.766412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:29.847038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:31.496961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:31.497032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:59:31.533125Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:31.536250Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593701323654009557:2081] 
1768046367186099 != 1768046367186102 2026-01-10T11:59:32.276571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701327948977096:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:32.276654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:59:32.432307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:59:32.432328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:59:32.432335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:59:32.432405Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:59:36.135397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:36.141789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T11:59:43.541661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:59:43.541687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:44.908617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701400963421924:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:44.908723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:44.909034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701400963421946:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:44.909074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701400963421947:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:44.909191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:44.928436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:44.975586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:44.980247Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701400963421950:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T11:59:45.136180Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593701405258389349:2679] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1768046386326,8787290272151131010,'queue1','CreateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1768046387370,17744492250728692869,'queue1','UpdateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1768046387745,14060397879662490695,'queue1','DeleteMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== 2026-01-10T11:59:31.526923Z: component=schemeshard, tx_id=281474976715657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.529618Z: component=schemeshard, tx_id=281474976715658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.529844Z: component=schemeshard, tx_id=281474976715659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530104Z: component=schemeshard, tx_id=281474976715660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530242Z: component=schemeshard, tx_id=281474976715661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530353Z: component=schemeshard, tx_id=281474976715662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530458Z: component=schemeshard, tx_id=281474976715663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], 
status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530583Z: component=schemeshard, tx_id=281474976715664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530740Z: component=schemeshard, tx_id=281474976715665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530875Z: component=schemeshard, tx_id=281474976715666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.530993Z: component=schemeshard, tx_id=281474976715667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.531170Z: component=schemeshard, tx_id=281474976715668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.531318Z: component=schemeshard, tx_id=281474976715669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.531472Z: component=schemeshard, tx_id=281474976715670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.531825Z: component=schemeshard, tx_id=281474976715671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532093Z: component=schemeshard, tx_id=281474976715672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532251Z: component=schemeshard, tx_id=281474976715673, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532391Z: component=schemeshard, tx_id=281474976715674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532538Z: component=schemeshard, tx_id=281474976715675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532638Z: component=schemeshard, tx_id=281474976715676, 
remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532754Z: component=schemeshard, tx_id=281474976715677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532853Z: component=schemeshard, tx_id=281474976715678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.532982Z: component=schemeshard, tx_id=281474976715679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.535326Z: component=schemeshard, tx_id=281474976715680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.535486Z: component=schemeshard, tx_id=281474976715681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.535577Z: component=schemeshard, tx_id=281474976715682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.535700Z: component=schemeshard, tx_id=281474976715683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.535794Z: component=schemeshard, tx_id=281474976715684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.538180Z: component=schemeshard, tx_id=281474976715685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.540811Z: component=schemeshard, tx_id=281474976715686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.541628Z: component=schemeshard, tx_id=281474976715687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.542245Z: component=schemeshard, tx_id=281474976715688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, 
detailed_status=StatusAccepted 2026-01-10T11:59:31.542398Z: component=schemeshard, tx_id=281474976715689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.542541Z: component=schemeshard, tx_id=281474976715690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.542900Z: component=schemeshard, tx_id=281474976715691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:31.543252Z: component=schemeshard, tx_id=281474976715692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:36.136159Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:36.143907Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:36.383420Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS/Root/SQS/CreateCloudEventProcessor], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:44.942111Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[.metadata/workload_manager/pools/default], status=SUCCESS, detailed_status=StatusAccepted, new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-10T11:59:44.979030Z: component=schemeshard, tx_id=281474976710661, remote_address=127.0.0.1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE TABLE, paths=[/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T11:59:45.135882Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[default], status=SUCCESS, detailed_status=StatusAlreadyExists, reason=Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges), new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-10T11:59:49.074923Z: component=ymq, id=8787290272151131010$CreateMessageQueue$2026-01-10T11:59:49.073067Z, operation=CreateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.create, created_at=2026-01-10T11:59:46.326000Z, cloud_id=cloud1, 
folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8787290272151131010$CreateMessageQueue$2026-01-10T11:59:46.326000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-10T11:59:49.083588Z: component=ymq, id=17744492250728692869$UpdateMessageQueue$2026-01-10T11:59:49.073125Z, operation=UpdateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.setAttributes, created_at=2026-01-10T11:59:47.370000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=17744492250728692869$UpdateMessageQueue$2026-01-10T11:59:47.370000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-10T11:59:49.083761Z: component=ymq, id=14060397879662490695$DeleteMessageQueue$2026-01-10T11:59:49.073163Z, operation=DeleteMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.delete, created_at=2026-01-10T11:59:47.745000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=14060397879662490695$DeleteMessageQueue$2026-01-10T11:59:47.745000Z, queue=queue1, labels={"k1" : "v1"} |82.2%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2026-01-10 11:59:57.852 INFO ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007FCBA08AF000) [generic] yql_generic_lookup_actor.cpp:153: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2026-01-10 11:59:57.866 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007FCBA08AF000) [generic] yql_generic_lookup_actor.cpp:301: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ 
left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } ... "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { 
comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2026-01-10 11:59:57.978 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:332: ActorId=[3:7593701454252637727:2051] Got TListSplitsStreamIterator 2026-01-10 11:59:57.978 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:200: ActorId=[3:7593701454252637727:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2026-01-10 11:59:57.979 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:233: ActorId=[3:7593701454252637727:2051] Got ReadSplitsStreamIterator from Connector 2026-01-10 11:59:57.979 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:354: ActorId=[3:7593701454252637727:2051] Got DataChunk 2026-01-10 11:59:57.980 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:365: ActorId=[3:7593701454252637727:2051] Got EOF 2026-01-10 11:59:57.980 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=22981, tid=0x00007BCB9B9D6640) [generic] yql_generic_lookup_actor.cpp:419: Sending lookup results for 3 keys |82.2%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/generic/actors/ut/unittest >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> MediatorTest::BasicTimecastUpdates >> TTxDataShardBuildFulltextIndexScan::BuildWithData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks >> test.py::test[solomon-Downsampling-default.txt] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.2%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |82.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.2%| [TS] {RESULT} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |82.2%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> TestFormatHandler::WatermarkWhereFalse [GOOD] >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TestJsonParser::Simple1 >> TestJsonParser::Simple1 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV2 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 >> TestJsonParser::Simple2 [GOOD] >> TestJsonParser::Simple3 >> 
DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey >> TestJsonParser::Simple3 [GOOD] >> TestJsonParser::Simple4 >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> TestJsonParser::LargeStrings [GOOD] >> TestJsonParser::ManyValues >> TestJsonParser::ManyValues [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> TestJsonParser::MissingFields >> TestJsonParser::MissingFields [GOOD] >> TestJsonParser::NestedTypes >> DescribeSchemaSecretsService::GetUpdatedValue [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue >> TestJsonParser::NestedTypes [GOOD] >> TestJsonParser::SimpleBooleans >> TestJsonParser::SimpleBooleans [GOOD] >> TestJsonParser::ChangeParserSchema >> TestJsonParser::ChangeParserSchema [GOOD] >> TestJsonParser::ManyBatches [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> TestJsonParser::LittleBatches >> TMLPReaderTests::TopicNotExists [GOOD] >> TMLPReaderTests::TopicWithoutConsumer >> TestJsonParser::LittleBatches [GOOD] >> TestJsonParser::MissingFieldsValidation >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation [GOOD] >> TestJsonParser::NestedJsonValidation >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> MediatorTimeCast::ReadStepSubscribe >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> TestJsonParser::JsonStructureValidation [GOOD] >> TestJsonParser::SkipErrors_Simple1 >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemoryController::ResourceBroker_ConfigCS >> TestJsonParser::SkipErrors_Simple1 [GOOD] >> TestJsonParser::SkipErrors_StringValidation >> TestJsonParser::SkipErrors_StringValidation [GOOD] >> TestJsonParser::SkipErrors_NoField >> TestJsonParser::SkipErrors_NoField [GOOD] >> MediatorTest::BasicTimecastUpdates [GOOD] >> TestJsonParser::SkipErrors_NoJson |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd >> TestJsonParser::SkipErrors_NoJson [GOOD] >> TestJsonParser::SkipErrors_Optional |82.3%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> TestJsonParser::SkipErrors_Optional [GOOD] |82.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 [GOOD] >> TestJsonParser::SkipErrors1JsonIn2Messages >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 >> TestJsonParser::SkipErrors1JsonIn2Messages [GOOD] >> TestPurecalcFilter::Simple1 >> MediatorTest::MultipleTablets >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> TCreateAndDropViewTest::DropViewIfExists [GOOD] >> TCreateAndDropViewTest::DropViewInFolder >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance [GOOD] >> TTxDataShardBuildIndexScan::BadRequest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> TMemoryController::ResourceBroker_ConfigCS [GOOD] >> TMemoryController::GroupedMemoryLimiter_ConfigCS >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> DescribeSchemaSecretsService::GetUnexistingValue [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> TestPurecalcFilter::Simple1 [GOOD] >> TMLPReaderTests::TopicWithoutConsumer [GOOD] >> TMLPReaderTests::EmptyTopic >> TestPurecalcFilter::Simple2 >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> Graph::CreateGraphShard >> MediatorTest::MultipleTablets [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> MediatorTest::TabletAckBeforePlanComplete >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection >> TTxDataShardBuildIndexScan::BadRequest [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> TMemoryController::GroupedMemoryLimiter_ConfigCS [GOOD] >> TMemoryController::ColumnShardCaches_Config >> TestPurecalcFilter::Simple2 [GOOD] >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut >> TestPurecalcFilter::ManyValues >> test_workload.py::TestYdbMixedWorkload::test[row] >> overlapping_portions.py::TestOverlappingPortions::test >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] >> test.py::TestViewer::test_whoami_root |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> QuoterWithKesusTest::HandlesNonExistentResource 
[GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |82.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> MediatorTest::TabletAckWhenDead >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> TestPurecalcFilter::ManyValues [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> DescribeSchemaSecretsService::GetDroppedValue [GOOD] >> DescribeSchemaSecretsService::GetInParallel >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> TestPurecalcFilter::NullValues >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |82.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] >> MediatorTimeCast::GranularTimecast [GOOD] >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> TMemoryController::ColumnShardCaches_Config [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> TMLPReaderTests::EmptyTopic [GOOD] >> TMLPReaderTests::TopicWithData >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:00:50.729139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:51.670296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:00:51.678878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:51.679272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:51.679371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:00:52.136064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:00:52.136273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:00:52.141822Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046416428759 != 1768046416428762 2026-01-10T12:00:52.158983Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:00:52.225293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:00:52.355062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:00:52.761790Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:922: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2026-01-10T12:00:52.762176Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-10T12:00:52.766573Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 1000 2026-01-10T12:00:52.777677Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:00:52.903431Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1500 2026-01-10T12:00:53.005170Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2026-01-10T12:00:53.143601Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2026-01-10T12:00:53.289420Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2026-01-10T12:00:53.439124Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2026-01-10T12:00:53.614376Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 
2026-01-10T12:00:53.625092Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 8000 } 2026-01-10T12:00:53.757215Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2026-01-10T12:00:53.895096Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 8000 2026-01-10T12:00:53.897453Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [1:26:2073] HANDLE EvClientDestroyed 2026-01-10T12:00:53.919651Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-10T12:00:53.920578Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 8000 2026-01-10T12:00:53.933368Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 13000 } 2026-01-10T12:00:54.040050Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8500 2026-01-10T12:00:54.143338Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2026-01-10T12:00:54.313282Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2026-01-10T12:00:54.497252Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2026-01-10T12:00:54.669366Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2026-01-10T12:00:54.815445Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 13000 ... waiting for SysViewsRoster update finished 2026-01-10T12:00:59.560249Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:59.571320Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:59.571618Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:59.571673Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:01:00.002577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:00.002715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:00.032857Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046455455715 != 1768046455455719 2026-01-10T12:01:00.037506Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:00.093422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:00.175398Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:01:00.530831Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-10T12:01:00.531760Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2026-01-10T12:01:00.531844Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:378: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-10T12:01:00.531904Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:886:2754] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2026-01-10T12:01:00.532950Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-10T12:01:00.533151Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 1000 2026-01-10T12:01:00.533334Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2026-01-10T12:01:00.533596Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-10T12:01:00.533695Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify 
Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2026-01-10T12:01:00.533754Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:889:2756] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 1000} 2026-01-10T12:01:00.533990Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 1000 2026-01-10T12:01:00.534230Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-10T12:01:00.534326Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2026-01-10T12:01:00.534384Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:890:2757] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 1000} 2026-01-10T12:01:00.534607Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxM ... GranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.304688Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.318343Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... 
unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.335003Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.346458Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.372950Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.398295Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-10T12:01:01.420671Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2026-01-10T12:01:01.434076Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [2:26:2073] HANDLE EvClientDestroyed 2026-01-10T12:01:01.434272Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 6 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-10T12:01:01.434352Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-10T12:01:01.435190Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-10T12:01:01.435347Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-10T12:01:01.435401Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-10T12:01:01.462953Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2026-01-10T12:01:01.463356Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-10T12:01:01.486355Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-10T12:01:01.497468Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... tablet1 at 4000 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-10T12:01:01.508490Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4500} ... tablet1 at 4500 ... tablet2 at 4500 ... 
tablet3 at 4500 |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/time_cast/ut/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData >> MediatorTest::TabletAckWhenDead [GOOD] |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> MediatorTest::PlanStepAckToReconnectedMediator >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.4%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest |82.4%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> Graph::MemoryBackendFullCycle [GOOD] |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> Graph::LocalBackendFullCycle >> TestPurecalcFilter::NullValues [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0B ... 
waiting for SysViewsRoster update finished 2026-01-10T11:57:41.852318Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2026-01-10T11:57:41.852935Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2026-01-10T11:57:41.853065Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 12.5MiB Min: 12.5MiB Max: 12.5MiB 2026-01-10T11:57:41.853186Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2026-01-10T11:57:41.853267Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 60MiB queue_cs_general: 7.5MiB queue_cs_indexation: 2.5MiB queue_cs_normalizer: 7.5MiB queue_cs_ttl: 2.5MiB queue_kqp_resource_manager: 40MiB 2026-01-10T11:57:41.854138Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:161: Bootstrapped with config HardLimitBytes: 209715200 2026-01-10T11:57:41.859637Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T11:57:41.861952Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 33554432 2026-01-10T11:57:41.867692Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesScanGroupedMemory [1:51:2098] registered 2026-01-10T11:57:41.881541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:41.994809Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesCompGroupedMemory [1:52:2099] registered 2026-01-10T11:57:41.995564Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDeduplicationGroupedMemory [1:53:2100] registered 2026-01-10T11:57:41.996578Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 2621440 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 2621440 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2026-01-10T11:57:41.997323Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_general" Weight: 100 
Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" 
DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 62914560 } 2026-01-10T11:57:41.998562Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-10T11:57:41.999579Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer SharedCache [1:20:2067] registered 2026-01-10T11:57:41.999711Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:238: Register memory consumer 2026-01-10T11:57:41.999773Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-10T11:57:42.000257Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesBlobCache [1:21:2068] registered 2026-01-10T11:57:42.002201Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDataAccessorCache [1:22:2069] registered 2026-01-10T11:57:42.002327Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesColumnDataCache [1:23:2070] registered 2026-01-10T11:57:42.003033Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-10T11:57:42.004670Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-10T11:57:42.005135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:57:42.013565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:291:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:57:42.014080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:42.014202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:42.061526Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:429:2394] 1 registered 2026-01-10T11:57:42.074180Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2396] 0 registered 2026-01-10T11:57:42.080461Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2396] 2 registered 2026-01-10T11:57:42.081811Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2396] 4 registered 2026-01-10T11:57:42.082035Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:440:2396] 5 registered 2026-01-10T11:57:42.085292Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:453:2398] 1 registered 2026-01-10T11:57:42.090057Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:453:2398] 2 registered 2026-01-10T11:57:42.131852Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 1 registered 2026-01-10T11:57:42.180453Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 2 registered 2026-01-10T11:57:42.180989Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 3 registered 2026-01-10T11:57:42.181507Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 4 registered 2026-01-10T11:57:42.181608Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 5 registered 2026-01-10T11:57:42.182555Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 6 registered 2026-01-10T11:57:42.182802Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 7 registered 2026-01-10T11:57:42.182896Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 8 registered 2026-01-10T11:57:42.183061Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 9 registered 2026-01-10T11:57:42.183211Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 10 registered 2026-01-10T11:57:42.183589Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 11 registered 2026-01-10T11:57:42.183899Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 12 registered 2026-01-10T11:57:42.190038Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:459:2400] 13 registered 2026-01-10T11:57:42.217651Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: Me ... 
in: 50MiB Max: 50MiB 2026-01-10T12:01:03.082828Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-10T12:01:03.082864Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-10T12:01:03.082900Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-10T12:01:03.082937Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 50MiB Min: 50MiB Max: 50MiB 2026-01-10T12:01:03.082973Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 31.3MiB Min: 31.3MiB Max: 31.3MiB 2026-01-10T12:01:03.083023Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 100MiB 2026-01-10T12:01:03.083153Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 236MiB 2026-01-10T12:01:03.083195Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-10T12:01:03.313473Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-10T12:01:03.314304Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-10T12:01:03.314382Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-10T12:01:03.314424Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.314471Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.314507Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.314542Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.314578Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.314620Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.314679Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 
125MiB Min: 125MiB Max: 125MiB 2026-01-10T12:01:03.314752Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-10T12:01:03.314861Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 600MiB queue_cs_general: 75MiB queue_cs_indexation: 25MiB queue_cs_normalizer: 75MiB queue_cs_ttl: 25MiB queue_kqp_resource_manager: 400MiB 2026-01-10T12:01:03.315212Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 26214400 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 26214400 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 419430400 } } ResourceLimit { Memory: 629145600 } 2026-01-10T12:01:03.332629Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 419430400 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: 
"queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 629145600 } 2026-01-10T12:01:03.334980Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-10T12:01:03.335623Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-10T12:01:03.335705Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-10T12:01:03.336578Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-10T12:01:03.496241Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-10T12:01:03.496907Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-10T12:01:03.496972Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-10T12:01:03.497006Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.497050Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.497088Z node 12 :MEMORY_CONTROLLER 
INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.497122Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.497159Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-10T12:01:03.497197Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-10T12:01:03.497235Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 125MiB Min: 125MiB Max: 125MiB 2026-01-10T12:01:03.497293Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-10T12:01:03.497434Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-10T12:01:03.497479Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/memory_controller/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TestPurecalcFilter::PartialPush >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> MediatorTest::WatcherReconnect >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> TestPurecalcFilter::PartialPush [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> DescribeSchemaSecretsService::GetInParallel [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TestPurecalcFilter::CompilationValidation >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |82.5%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> MediatorTest::WatcherReconnect [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> MediatorTest::MultipleSteps >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.5%| [LD] {RESULT} 
$(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> TestPurecalcFilter::CompilationValidation [GOOD] >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TMLPReaderTests::TopicWithData [GOOD] >> TMLPReaderTests::TopicWithManyIterationsData >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> TestPurecalcFilter::Emtpy >> MediatorTest::MultipleSteps [GOOD] >> TestPurecalcFilter::Emtpy [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> TestPurecalcFilter::Watermark |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> MediatorTest::WatchesBeforeFirstStep >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> Graph::LocalBordersOnGet [GOOD] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:00:52.965086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:00:52.965209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:00:52.965279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:00:52.965324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:00:52.965368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2026-01-10T12:00:52.965412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:00:52.965577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:00:52.965647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:00:52.966552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:00:52.966854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:00:53.068507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:00:53.068579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:00:53.109663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:00:53.109798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:00:53.109966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:00:53.159899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:00:53.160222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:00:53.161036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:00:53.161331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:00:53.165493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:00:53.165719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:00:53.167061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:00:53.167142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:00:53.167242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:00:53.167300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:00:53.167350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:00:53.167659Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:00:53.394618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.395699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.395852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.395924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:00:53.396803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 0T12:01:26.427783Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.427819Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.427898Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2026-01-10T12:01:26.427923Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.427955Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.427991Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.436166Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2026-01-10T12:01:26.436283Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.436375Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.436472Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.436797Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2026-01-10T12:01:26.436837Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.436878Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.436939Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437040Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2026-01-10T12:01:26.437068Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437100Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.437132Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437215Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2026-01-10T12:01:26.437240Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437273Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.437306Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437380Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2026-01-10T12:01:26.437404Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437433Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.437467Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437538Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2026-01-10T12:01:26.437563Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437592Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.437624Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437701Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2026-01-10T12:01:26.437732Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437764Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD 
TTxStoreMetrics::Execute 2026-01-10T12:01:26.437803Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.437913Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2026-01-10T12:01:26.437943Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.437975Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.438014Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.438096Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2026-01-10T12:01:26.438124Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.438169Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.438205Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.438305Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2026-01-10T12:01:26.438331Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-10T12:01:26.438361Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-10T12:01:26.438402Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-10T12:01:26.438462Z node 6 :GRAPH TRACE: shard_impl.cpp:226: SHARD Handle TEvGraph::TEvGetMetrics from [6:1019:2951] 2026-01-10T12:01:26.438532Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:20: SHARD TTxGetMetrics::Execute 2026-01-10T12:01:26.438591Z node 6 :GRAPH DEBUG: backends.cpp:352: DB Querying from 0 to 119 2026-01-10T12:01:26.457099Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457205Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457242Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457280Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457309Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457340Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457370Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457401Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457429Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457457Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457489Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457523Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457555Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457589Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457621Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457650Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457682Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457715Z node 6 :GRAPH 
DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457747Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457776Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457809Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457838Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457866Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457894Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457925Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457957Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.457990Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458020Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458048Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458078Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458109Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458140Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458172Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458203Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458232Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458261Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458292Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458333Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458379Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458413Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458444Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458476Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458514Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458545Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458573Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458605Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458635Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458664Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458697Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458726Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458756Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD 
TTxStoreMetrics::Complete 2026-01-10T12:01:26.458789Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458822Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458852Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458881Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458910Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458943Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.458973Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.459005Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.459037Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-10T12:01:26.459079Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:25: SHARD TTxGetMetric::Complete 2026-01-10T12:01:26.459142Z node 6 :GRAPH TRACE: tx_get_metrics.cpp:26: SHARD TxGetMetrics returned 60 points for request 3 2026-01-10T12:01:26.459366Z node 6 :GRAPH TRACE: service_impl.cpp:201: SVC TEvMetricsResult 3 2026-01-10T12:01:26.459432Z node 6 :GRAPH TRACE: service_impl.cpp:204: SVC TEvMetricsResult found request 3 resending to [6:1020:2952] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/graph/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |82.5%| [TM] {RESULT} ydb/core/graph/ut/unittest |82.5%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.5%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T11:59:40.091284Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:41.055604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:59:41.063478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:59:41.063910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:41.064022Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:59:43.930506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:43.931418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:43.996109Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046367257084 != 1768046367257087 2026-01-10T11:59:44.126940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:44.229529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:44.334646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:59:44.651628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:44.669708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema ... waiting for SysViewsRoster update finished 2026-01-10T11:59:59.541751Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:59.549349Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:59:59.549640Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:59.549699Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:59:59.806915Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:59.807030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:59.832816Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046393541973 != 1768046393541977 2026-01-10T11:59:59.836992Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:59.884420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:59.959042Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:00:00.605052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:00:00.663860Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) ... waiting for SysViewsRoster update finished 2026-01-10T12:00:13.369071Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:13.399205Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:00:13.425248Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:13.426314Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:13.426929Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:00:14.499151Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:00:14.499287Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:00:14.537427Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1768046404520273 != 1768046404520277 2026-01-10T12:00:14.543746Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:00:14.640721Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:00:14.732529Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:00:15.045125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:00:15.060248Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:00:22.956335Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:23.013021Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:23.015276Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:23.015777Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:00:24.505512Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:00:24.506815Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:00:24.554497Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1768046416173273 != 1768046416173276 2026-01-10T12:00:24.571070Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:00:24.655943Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:00:24.774392Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:00:25.548532Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.012554s 2026-01-10T12:00:25.565081Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:00:25.608654Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:00:52.641172Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:52.704101Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:52.704539Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:52.704612Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snap ... __operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:00:59.882238Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:01:05.001026Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:01:05.027067Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:01:05.065792Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:293:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:01:05.066261Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:01:05.066460Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:01:06.342538Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:06.342700Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:06.365610Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:34:2081] 1768046460797340 != 1768046460797344 2026-01-10T12:01:06.421014Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:06.564847Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:06.645477Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:01:07.247257Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:07.265027Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:01:11.979504Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:01:12.029862Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:01:12.044384Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:01:12.044869Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:01:12.044953Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:01:12.496189Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:12.496335Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:12.523421Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:34:2081] 1768046468192208 != 1768046468192212 2026-01-10T12:01:12.528963Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:12.575443Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:12.658662Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:01:12.959352Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:12.976151Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:01:18.207569Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:01:18.215181Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:01:18.220468Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:01:18.220783Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:01:18.220949Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:01:18.527623Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:18.527766Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:18.557380Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1768046473844366 != 1768046473844370 2026-01-10T12:01:18.565066Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:18.614295Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:18.707911Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:01:19.145145Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:19.167930Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 2026-01-10T12:01:19.478287Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-10T12:01:25.662460Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:01:25.680789Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:01:25.694408Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:290:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:01:25.695028Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:01:25.695175Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:01:26.459433Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:26.459617Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:26.469185Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1768046480963897 != 1768046480963901 2026-01-10T12:01:26.476928Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:26.529823Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:26.626922Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:01:27.103816Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:27.129011Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... 
restarting tablet 72057594046644480 (admin token) 2026-01-10T12:01:27.638211Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/tablet/ut/unittest |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.5%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest |82.5%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> MediatorTest::RebootTargetTablets >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TTxDataShardFilterKMeansScan::BadRequest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection >> TestPurecalcFilter::Watermark [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |82.5%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> TestPurecalcFilter::WatermarkWhere |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> MediatorTest::RebootTargetTablets [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> MediatorTest::ResendSubset >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV1 >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |82.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> TTxDataShardFilterKMeansScan::BadRequest [GOOD] >> TTxDataShardFilterKMeansScan::BuildToPosting >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> TestPurecalcFilter::WatermarkWhere [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> MediatorTest::ResendSubset [GOOD] >> TestPurecalcFilter::WatermarkWhereFalse >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> test.py::TestViewer::test_whoami_root [GOOD] >> test.py::TestViewer::test_whoami_database [GOOD] >> test.py::TestViewer::test_whoami_viewer [GOOD] >> test.py::TestViewer::test_whoami_monitoring |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test.py::TestViewer::test_whoami_monitoring [GOOD] >> test.py::TestViewer::test_counter >> MediatorTest::ResendNotSubset |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test.py::TestViewer::test_counter [GOOD] >> test.py::TestViewer::test_viewer_nodelist ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2026-01-10T11:58:10.129697Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700997045855308:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:10.129810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:10.786592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:10.810174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:10.810317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:10.818250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:10.882836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:10.977433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:58:10.977468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-01-10T11:58:10.977476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:10.977642Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:11.050278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:11.135155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:11.244042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:11.291978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T11:58:22.603160Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593701045298173729:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:22.647681Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:22.797971Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:23.236655Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:23.470516Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:23.517687Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:23.518099Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:23.600052Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:23.790683Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:24.278133Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:24.446493Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-10T11:58:24.447157Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:58:24.447166Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:24.447814Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:25.217709Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:25.312288Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T11:58:47.317814Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593701155906318040:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:47.317886Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:48.353911Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:48.488681Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:48.492247Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:48.724553Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:49.200659Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:49.514254Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:50.532422Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:50.565006Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:51.612645Z node 7 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:51.774682Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:51.775505Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:51.799776Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:51.831667Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:52.392039Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.025482s 2026-01-10T11:58:52.513458Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.043711s 2026-01-10T11:58:52.570175Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.030367s 2026-01-10T11:58:52.751182Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7593701155906318040:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:52.912939Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008186s 2026-01-10T11:58:53.065390Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:53.868294Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:54.262764Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:58:54.262782Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:58:54.262792Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:54.262853Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:55.347490Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:55.425897Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T11:59:00.518238Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593701208625827384:2 ... 
2026-01-10T12:00:52.654578Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:00:53.309295Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:00:53.435154Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T12:00:53.487818Z node 28 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7593701675511855053:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:00:53.487920Z node 28 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:01:02.675807Z node 31 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7593701734273090718:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:01:02.675975Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:01:02.872454Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:03.192392Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:03.260742Z node 31 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:03.394442Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:03.394576Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:03.558525Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:03.683322Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:01:03.824204Z node 33 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.021014s 2026-01-10T12:01:03.840202Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:01:03.935791Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-10T12:01:03.935826Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:01:03.935844Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:01:03.935964Z node 31 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:01:04.905189Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:05.143296Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T12:01:13.372425Z node 34 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7593701782565304832:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:01:13.372739Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:01:13.469179Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:01:13.516468Z node 34 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.007669s 2026-01-10T12:01:13.579097Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:13.860259Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:14.028905Z node 34 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:14.051425Z node 34 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [34:7593701782565304665:2083] 1768046473335955 != 1768046473335958 2026-01-10T12:01:14.123003Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:14.123146Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:14.173367Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:14.280191Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:14.372222Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:01:14.632859Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:01:14.632917Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:01:14.632932Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:01:14.633066Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:01:15.252559Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:01:15.256768Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:15.642206Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-10T12:01:28.586201Z node 37 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7593701847585442549:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:01:28.587554Z node 37 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:01:28.821456Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:29.072361Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:01:29.330073Z node 37 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:29.355117Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:01:29.355258Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:01:29.385534Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:01:29.593304Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:01:29.665284Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:01:29.690934Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:01:29.690963Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:01:29.690978Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:01:29.691126Z node 37 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:01:30.278106Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:01:30.558386Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) |82.6%| [TM] {BAZEL_UPLOAD} ydb/services/rate_limiter/ut/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] >> test.py::TestViewer::test_viewer_nodelist [GOOD] >> test.py::TestViewer::test_viewer_nodes >> test.py::TestViewer::test_viewer_nodes [GOOD] >> test.py::TestViewer::test_viewer_nodes_all >> test.py::TestViewer::test_viewer_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> test.py::TestViewer::test_viewer_storage_nodes_no_database_filter_node_id [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes >> test.py::TestViewer::test_viewer_storage_nodes [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |82.6%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> test.py::TestViewer::test_viewer_storage_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_nodes_group >> test.py::TestViewer::test_viewer_nodes_group [GOOD] >> test.py::TestViewer::test_storage_groups [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_pool_name [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_capacity_alert [GOOD] >> test.py::TestViewer::test_viewer_sysinfo >> test.py::TestViewer::test_viewer_sysinfo [GOOD] >> test.py::TestViewer::test_viewer_vdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_pdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_bsgroupinfo >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> test.py::TestViewer::test_viewer_bsgroupinfo [GOOD] >> test.py::TestViewer::test_viewer_tabletinfo >> test.py::TestViewer::test_viewer_tabletinfo [GOOD] >> test.py::TestViewer::test_viewer_describe >> test.py::TestViewer::test_viewer_describe [GOOD] >> test.py::TestViewer::test_viewer_cluster >> test.py::TestViewer::test_viewer_cluster [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo_db >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> test.py::TestViewer::test_viewer_tenantinfo_db [GOOD] >> test.py::TestViewer::test_viewer_healthcheck >> test.py::TestViewer::test_viewer_healthcheck [GOOD] >> test.py::TestViewer::test_viewer_acl >> TTxDataShardMiniKQL::ReadConstant >> test.py::TestViewer::test_viewer_acl [GOOD] >> test.py::TestViewer::test_viewer_acl_write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 6267787028321684783 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2026-01-10T11:55:33.263780Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-01-10T11:55:34.388653Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with 
key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2026-01-10T11:55:38.467686Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 32 
SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2026-01-10T11:55:40.941566Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2026-01-10T11:55:41.365946Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-01-10T11:55:41.371221Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16040921754905261768] 2026-01-10T11:55:41.671248Z 1 00h02m15.161024s :BS_PROXY ERROR: StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3 Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4). GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3" ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: ... 
1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 3 2026-01-10T12:01:04.019736Z 1 00h25m01.150000s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 1 2026-01-10T12:01:04.486929Z 1 00h25m11.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 964 SEND TEvPut with 
key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 0 2026-01-10T12:01:08.220925Z 9 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:116288:349] ServerId# [1:117315:169] TabletId# 72057594037932033 PipeClientId# [9:116288:349] 2026-01-10T12:01:08.221197Z 8 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:146919:17] ServerId# [1:146928:4108] TabletId# 72057594037932033 PipeClientId# [8:146919:17] 2026-01-10T12:01:08.221314Z 7 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:145873:17] ServerId# [1:145879:3981] TabletId# 72057594037932033 PipeClientId# [7:145873:17] 2026-01-10T12:01:08.221468Z 6 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:122917:17] ServerId# [1:122923:1015] TabletId# 72057594037932033 PipeClientId# [6:122917:17] 2026-01-10T12:01:08.221682Z 5 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:142960:17] ServerId# [1:142967:3598] TabletId# 72057594037932033 PipeClientId# [5:142960:17] 2026-01-10T12:01:08.221821Z 4 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:151906:17] ServerId# [1:151915:4708] TabletId# 72057594037932033 PipeClientId# [4:151906:17] 2026-01-10T12:01:08.222072Z 3 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:141872:17] ServerId# [1:141881:3475] TabletId# 72057594037932033 PipeClientId# [3:141872:17] 2026-01-10T12:01:08.222214Z 2 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:150943:17] ServerId# [1:150949:4599] TabletId# 72057594037932033 PipeClientId# [2:150943:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 7 2026-01-10T12:01:12.593643Z 1 00h26m21.163584s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999829} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Starting nodes Start compaction 1 Start checking |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> test.py::TestViewer::test_viewer_acl_write [GOOD] >> test.py::TestViewer::test_viewer_autocomplete >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> test.py::TestViewer::test_viewer_autocomplete [GOOD] >> test.py::TestViewer::test_viewer_check_access >> TMLPReaderTests::TopicWithManyIterationsData [GOOD] >> TMLPReaderTests::TopicWithBigMessage >> test.py::TestViewer::test_viewer_check_access [GOOD] >> test.py::TestViewer::test_viewer_query >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> MediatorTest::ResendNotSubset [GOOD] >> test.py::TestViewer::test_viewer_query [GOOD] >> test.py::TestViewer::test_viewer_query_from_table >> DescribeSchemaSecretsService::FailWithoutGrants [GOOD] >> DescribeSchemaSecretsService::GroupGrants >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> test.py::TestViewer::test_viewer_query_from_table [GOOD] >> test.py::TestViewer::test_viewer_query_from_table_different_schemas >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> MediatorTest::OneCoordinatorResendTxNotLost >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> TestPurecalcFilter::WatermarkWhereFalse [GOOD] >> 
TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> test.py::TestViewer::test_viewer_query_from_table_different_schemas [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> test.py::TestViewer::test_viewer_query_issue_13757 [GOOD] >> TestRawParser::Simple >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13945 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> TestRawParser::Simple [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2026-01-10T12:01:50.259690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:01:50.259766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:01:50.260524Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:01:50.277368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:01:50.277875Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:01:50.278179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:01:50.324803Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:01:50.346186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:01:50.348796Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:01:50.350835Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:01:50.350934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:01:50.350995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:01:50.351466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:01:50.351709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:01:50.351823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:01:50.450193Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: 
TDataShard::TTxInitRestored::Complete 2026-01-10T12:01:50.492761Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:01:50.493030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:01:50.493173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:01:50.493217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:01:50.493287Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:01:50.493331Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:01:50.493622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:50.493705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:50.494118Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:01:50.494280Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:01:50.494384Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:01:50.494424Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:01:50.494463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:01:50.494505Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:01:50.494540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:01:50.494596Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:01:50.494651Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:01:50.494758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:50.494814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:50.494870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:01:50.508432Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:01:50.508531Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:01:50.508648Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:01:50.508920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:01:50.509000Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:01:50.509089Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:01:50.509146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:01:50.509213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:01:50.509252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:01:50.509292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:01:50.509656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:01:50.509707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:01:50.509743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:01:50.509802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:01:50.509903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:01:50.509955Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:01:50.509998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:01:50.510032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:01:50.510064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:01:50.524804Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:01:50.524905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:01:50.524942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:01:50.525017Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:01:50.525107Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:01:50.525760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient 
[1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:50.525844Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:50.525903Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:01:50.526102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:01:50.526153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:01:50.526332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:01:50.526391Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:01:50.526432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:01:50.526479Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:01:50.531029Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:01:50.531123Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:01:50.531421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:50.531472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:50.531538Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:01:50.531588Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:01:50.531622Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:01:50.531670Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:01:50.531708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
26-01-10T12:01:52.640606Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-10T12:01:52.640729Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-10T12:01:52.640886Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:238:2230], Recipient [3:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:52.640936Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:01:52.641309Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:01:52.641428Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:01:52.641577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 238 RawX2: 12884904118 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-10T12:01:52.641661Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [3:26:2073], Recipient [3:238:2230]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-10T12:01:52.641704Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T12:01:52.641743Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-10T12:01:52.641792Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:01:52.641879Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [3:26:2073], Recipient [3:238:2230]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-10T12:01:52.641934Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-10T12:01:52.641975Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-10T12:01:52.642060Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:01:52.642106Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:01:52.642239Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:01:52.642280Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:01:52.642320Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:01:52.642440Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:01:52.642508Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:01:52.642629Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [3:284:2267], Recipient [3:238:2230]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:288:2271] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:01:52.642670Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:01:52.642760Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [3:126:2151], Recipient [3:238:2230]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-10T12:01:52.642800Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-10T12:01:52.642843Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-10T12:01:52.642905Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-10T12:01:52.658507Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [3:284:2267], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:284:2267] ServerId: [3:288:2271] } 2026-01-10T12:01:52.658576Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T12:01:52.702860Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-10T12:01:52.702946Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-10T12:01:52.703265Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:294:2275], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:52.703307Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:01:52.703352Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:292:2274], serverId# [3:294:2275], sessionId# [0:0:0] 2026-01-10T12:01:52.703539Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? 
p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-10T12:01:52.703576Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:01:52.703679Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:01:52.708734Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-10T12:01:52.708878Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:01:52.708927Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-10T12:01:52.708967Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:01:52.709006Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:01:52.709048Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:01:52.709111Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-10T12:01:52.709153Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:01:52.709178Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:01:52.709207Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:01:52.709233Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-01-10T12:01:52.709262Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:01:52.709285Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:01:52.709311Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:01:52.709337Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:01:52.709774Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-10T12:01:52.709857Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:01:52.709949Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:01:52.709982Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 
2026-01-10T12:01:52.710011Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-10T12:01:52.710040Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:01:52.710084Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:01:52.710149Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayComplete 2026-01-10T12:01:52.710182Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-10T12:01:52.710218Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-10T12:01:52.710257Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-10T12:01:52.710299Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:01:52.710324Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-10T12:01:52.710352Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-10T12:01:52.710429Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:01:52.710472Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:01:52.710515Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> TestRawParser::ManyValues >> TestRawParser::ManyValues [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> TestRawParser::ChangeParserSchema >> test.py::TestViewer::test_viewer_query_issue_13945 [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> TestRawParser::ChangeParserSchema [GOOD] >> TestRawParser::TypeKindsValidation >> test.py::TestViewer::test_pqrb_tablet >> TestRawParser::TypeKindsValidation [GOOD] >> KeyValueGRPCService::SimpleWriteReadV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f17/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit_log.82rhf1wv.txt 
2026-01-10T12:01:36.483237Z: {"paths":"[/Root/test_auditlog.py]","tx_id":"281474976720663","acl_add":"[+U:other-user@builtin]","database":"{none}","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusSuccess","operation":"MODIFY ACL","component":"schemeshard"} 2026-01-10T12:01:36.733587Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:36.733546Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-10T12:01:36.578255Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:01:37.219467Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:37.219429Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-10T12:01:36.907600Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:01:37.645692Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:37.645655Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-10T12:01:37.335166Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:01:38.254349Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:38.254318Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:01:37.776555Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:01:38.545691Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:38.545661Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-10T12:01:38.365336Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:01:38.844735Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:01:38.844701Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2026-01-10T12:01:38.657972Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test.py::TestViewer::test_pqrb_tablet [GOOD] >> test.py::TestViewer::test_viewer_nodes_issue_14992 >> test.py::TestViewer::test_viewer_nodes_issue_14992 [GOOD] >> test.py::TestViewer::test_operations_list >> test.py::TestViewer::test_operations_list [GOOD] >> test.py::TestViewer::test_operations_list_page >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> test.py::TestViewer::test_operations_list_page [GOOD] >> test.py::TestViewer::test_operations_list_page_bad [GOOD] >> test.py::TestViewer::test_scheme_directory >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> test.py::TestViewer::test_scheme_directory [GOOD] >> test.py::test[solomon-UnknownSetting-] [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> test.py::TestViewer::test_topic_data >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/oom/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> TTxDataShardFilterKMeansScan::BuildToPosting [GOOD] >> TTxDataShardFilterKMeansScan::BuildToBuild >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2026-01-10T11:58:23.459562Z node 1 :FQ_ROW_DISPATCHER DEBUG: 
purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-10T11:58:23.459836Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-10T11:58:23.459848Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-10T11:58:23.460003Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-10T11:58:23.460149Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7593701052486743938:2051] 2026-01-10T11:58:38.927385Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7593701052486743938:2051] [id 1]: Started compile request 2026-01-10T11:58:39.824844Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7593701052486743938:2051] [id 1]: Compilation completed for request 2026-01-10T11:58:39.824948Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [1:7593701052486743938:2051] 2026-01-10T11:58:39.828231Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-10T11:58:39.828258Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T11:58:39.828292Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-10T11:58:39.828354Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; 2026-01-10T11:58:39.836726Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [1:0:0] 2026-01-10T11:58:39.836763Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; ' (client id: [1:0:0]) 2026-01-10T11:58:39.836800Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 2 2026-01-10T11:58:39.836886Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7593701052486743938:2051] 2026-01-10T11:58:39.836948Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7593701052486743938:2051] [id 2]: Started compile request 2026-01-10T11:58:39.860935Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7593701052486743938:2051] [id 2]: Compilation completed for request 2026-01-10T11:58:39.861037Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 2 from [1:7593701052486743938:2051] 2026-01-10T11:58:39.861254Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 2 2026-01-10T11:58:39.861273Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: 
Program compilation finished 2026-01-10T11:58:39.861299Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [1:0:0] 2026-01-10T11:58:39.861340Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:439: GenerateSql: No sql was generated 2026-01-10T11:58:39.861355Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [2:0:0] 2026-01-10T11:58:40.822239Z node 2 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-10T11:58:40.822405Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-10T11:58:40.822427Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-10T11:58:40.822458Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-10T11:58:40.825935Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7593701124497716822:2051] 2026-01-10T11:58:55.646859Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [2:7593701124497716822:2051] [id 1]: Started compile request 2026-01-10T11:58:55.668829Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [2:7593701124497716822:2051] [id 1]: Compilation completed for request 2026-01-10T11:58:55.668932Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [2:7593701124497716822:2051] 2026-01-10T11:58:55.669062Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-10T11:58:55.669079Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T11:58:55.669105Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-10T11:58:55.669146Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-10T11:58:55.669179Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2026-01-10T11:58:55.919262Z node 3 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2026-01-10T11:58:55.919415Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-10T11:58:55.919426Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 
50, FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-10T11:58:55.919469Z node 3 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-10T11:58:55.919533Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7593701187534395066:2051] 2026-01-10T11:58:59.177971Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [3:7593701187534395066:2051] [id 1]: Started compile request 2026-01-10T11:58:59.182774Z node 3 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [3:7593701187534395066:2051] [id 1]: Compilation failed for request 2026-01-10T11:58:59.182889Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [3:7593701187534395066:2051] 2026-01-10T11:58:59.183192Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-10T11:58:59.183274Z node 3 :FQ_ROW_DISPATCHER ERROR: purecalc_filter.cpp:375: TProgramCompileHandler: Program compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:3:27: Error: extraneous input '(' expecting {, ';'} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; } } 2026-01-10T11:58:59.536994Z node 4 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-10T11:58:59.538108Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-10T11:58:59.538314Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; ' (client id: [0:0:0]) 2026-01-10T11:58:59.538364Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-10T11:58:59.538462Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [4:7593701208046329620:2051] 2026-01-10T11:59:02.701655Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [4:7593701208046329620:2051] [id 1]: Started compile request 2026-01-10T11:59:03.488345Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [4:7593701208046329620:2051] [id 1]: Compilation completed for request 2026-01-10T11:59:03.488438Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [4:7593701208046329620:2051] 2026-01-10T11:59:03.490431Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-10T11:59:03.490453Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T11:59:03.490484Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-10T11:59:03.491400Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 1 clients, number rows: 2 2026-01-10T11:59:03.491412Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 2 rows to purecalc filter (client id: [0:0:0]) 2026-01-10T11:59:03.491789Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T11:59:03.547638Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 1 clients, number rows: 2 2026-01-10T11:59:03.547684Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 2 rows to purecalc filter (client id: [0:0:0]) 2026-01-10T11:59:03.547694Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T11:59:03.547759Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2026-01-10T11:59:03.547789Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 0 clients, number rows: 2 2026-01-10T11:59:04.274147Z node 5 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-10T11:59:04. ... 
> 100, FALSE) AS _filter, _offset FROM Input; 2026-01-10T12:00:41.324878Z node 41 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:00:41.372577Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [41:7593701643187454628:2051] 2026-01-10T12:00:54.610091Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [41:7593701643187454628:2051] [id 0]: Started compile request 2026-01-10T12:00:54.637602Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [41:7593701643187454628:2051] [id 0]: Compilation completed for request 2026-01-10T12:00:54.637728Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [41:7593701643187454628:2051] 2026-01-10T12:00:55.238682Z node 42 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2026-01-10T12:00:55.239014Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:00:55.268224Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [42:7593701704477254636:2051] 2026-01-10T12:01:00.429367Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [42:7593701704477254636:2051] [id 0]: Started compile request 2026-01-10T12:01:00.469711Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [42:7593701704477254636:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:00.469840Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [42:7593701704477254636:2051] 2026-01-10T12:01:00.470025Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T12:01:00.470125Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:00.470210Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:00.470245Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:00.470276Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:00.470307Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:01.175035Z node 43 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 is null, FALSE) AS _filter, _offset FROM Input; 2026-01-10T12:01:01.175389Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:01.182807Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [43:7593701728821359951:2051] 2026-01-10T12:01:08.783264Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [43:7593701728821359951:2051] [id 0]: Started compile request 2026-01-10T12:01:08.851903Z node 43 :FQ_ROW_DISPATCHER 
TRACE: compile_service.cpp:70: TPurecalcCompileActor [43:7593701728821359951:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:08.852035Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [43:7593701728821359951:2051] 2026-01-10T12:01:08.855416Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T12:01:08.855546Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-10T12:01:09.789709Z node 44 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 50, FALSE) AS _filter, _offset FROM Input; 2026-01-10T12:01:09.790065Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:09.793091Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [44:7593701764777821682:2051] 2026-01-10T12:01:14.900194Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [44:7593701764777821682:2051] [id 0]: Started compile request 2026-01-10T12:01:14.982673Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [44:7593701764777821682:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:14.982775Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [44:7593701764777821682:2051] 2026-01-10T12:01:14.982969Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T12:01:14.983073Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-10T12:01:15.932842Z node 45 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 
50, FALSE) AS _filter, _offset FROM Input; 2026-01-10T12:01:15.933129Z node 45 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:15.934577Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [45:7593701791164656395:2051] 2026-01-10T12:01:22.766060Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [45:7593701791164656395:2051] [id 0]: Started compile request 2026-01-10T12:01:22.770233Z node 45 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [45:7593701791164656395:2051] [id 0]: Compilation failed for request 2026-01-10T12:01:23.688813Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:439: GenerateSql: No sql was generated 2026-01-10T12:01:23.688945Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-10T12:01:23.688968Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 1 rows to client [0:0:0] without processing 2026-01-10T12:01:23.689030Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-10T12:01:23.689042Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 1 rows to client [0:0:0] without processing 2026-01-10T12:01:24.185606Z node 47 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-10T12:01:24.185948Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:24.209224Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [47:7593701828229915443:2051] 2026-01-10T12:01:34.188866Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [47:7593701828229915443:2051] [id 0]: Started compile request 2026-01-10T12:01:34.301149Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [47:7593701828229915443:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:34.301282Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [47:7593701828229915443:2051] 2026-01-10T12:01:34.304341Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T12:01:34.304467Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:35.067098Z node 48 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2026-01-10T12:01:35.067340Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:35.067408Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [48:7593701876216856506:2051] 2026-01-10T12:01:44.390769Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [48:7593701876216856506:2051] [id 0]: Started compile request 2026-01-10T12:01:44.430006Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [48:7593701876216856506:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:44.430119Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [48:7593701876216856506:2051] 2026-01-10T12:01:44.430432Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-10T12:01:44.430532Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:44.430649Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-10T12:01:45.396746Z node 49 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(FALSE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-10T12:01:45.397085Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-10T12:01:45.400622Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [49:7593701919634711823:2051] 2026-01-10T12:01:53.018052Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [49:7593701919634711823:2051] [id 0]: Started compile request 2026-01-10T12:01:53.061921Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [49:7593701919634711823:2051] [id 0]: Compilation completed for request 2026-01-10T12:01:53.062043Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [49:7593701919634711823:2051] 2026-01-10T12:01:55.516196Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 1 messages to parse 2026-01-10T12:01:55.516241Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2026-01-10T12:01:55.516473Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 1 messages to parse 2026-01-10T12:01:55.516492Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello2__large_str", "a2": 101, "event": "event2"} |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:00:14.620481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:14.972481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:00:14.980648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:14.981119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:14.981230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:00:15.668715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:00:15.669164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:00:15.681935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046399327604 != 1768046399327607 2026-01-10T12:00:15.730887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:00:15.805769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:00:16.010915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:00:16.607526Z node 1 :TX_MEDIATOR INFO: mediator__schema.cpp:23: tablet# 72057594047365120 TTxSchema Complete 2026-01-10T12:00:16.611100Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:88: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2026-01-10T12:00:16.615284Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:853:2738] connected 2026-01-10T12:00:16.615392Z node 1 :TX_MEDIATOR NOTICE: mediator_impl.cpp:133: tablet# 72057594047365120 actor# [1:836:2728] HANDLE TEvMediatorConfiguration Version# 1 2026-01-10T12:00:16.615826Z node 1 :TX_MEDIATOR DEBUG: mediator__configure.cpp:77: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2026-01-10T12:00:16.615999Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:64: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2026-01-10T12:00:16.616460Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:859:2743] connected 2026-01-10T12:00:16.616521Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-10T12:00:16.616549Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:175: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:857:2742] bucket# 0 ... 
waiting for watcher to connect (done) 2026-01-10T12:00:16.616689Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-10T12:00:16.616749Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:159: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvWatch 2026-01-10T12:00:16.616781Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:164: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:856:2741] bucket.ActiveActor 2026-01-10T12:00:16.617160Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:380: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:857:2742]} 2026-01-10T12:00:16.618037Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:391: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2026-01-10T12:00:16.632090Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:863:2747] connected 2026-01-10T12:00:16.632991Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-10T12:00:16.633178Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:861:2745] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 951 Coordinator# 72057594046316545 2026-01-10T12:00:16.634783Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2026-01-10T12:00:16.635152Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1000 2026-01-10T12:00:16.635431Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2026-01-10T12:00:16.636179Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... 
waiting for blocked plan step 2026-01-10T12:00:16.659701Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-10T12:00:16.659760Z node 1 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-10T12:00:16.659831Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [1:855:2740] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2026-01-10T12:00:16.659941Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2026-01-10T12:00:16.659976Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 marker# M2 2026-01-10T12:00:16.660006Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND Ev to# [1:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2026-01-10T12:00:16.660069Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1010 2026-01-10T12:00:16.660157Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}} marker# M4 2026-01-10T12:00:16.660298Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-10T12:00:16.667689Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:885:2759] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:00:16.668102Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-10T12:00:16.668284Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 ... waiting for blocked plan step (done) ... waiting for no pending commands 2026-01-10T12:00:16.670688Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-10T12:00:16.671572Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:189: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:857:2742] bucket# 0 ... 
waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2026-01-10T12:00:16.673032Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:342: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2026-01-10T12:00:16.673452Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:415: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:861:2745] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2026-01-10T12:00:16.674481Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) ... waiting for SysViewsRoster update finished 2026-01-10T12:00:50.503796Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:00:50.633453Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:00:50.633733Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:00:50.633774Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:00:51.796562Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:00:51.796670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:00:51.820529Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046420317284 != 1768046420317288 2026-01-10T12:00:51.824982Z node 2 :IMPOR ... EDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:901:2767] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 950 Coordinator# 72057594046316546 2026-01-10T12:01:57.770444Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-10T12:01:57.770508Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-10T12:01:57.770620Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-10T12:01:57.770651Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-10T12:01:57.770766Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [12:855:2740] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2026-01-10T12:01:57.770893Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2026-01-10T12:01:57.770950Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 txid# 2 marker# M2 2026-01-10T12:01:57.771009Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2026-01-10T12:01:57.771064Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2026-01-10T12:01:57.771099Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2026-01-10T12:01:57.771138Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:856:2741] bucket.ActiveActor step# 1010 2026-01-10T12:01:57.771237Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-10T12:01:57.771458Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-10T12:01:57.771621Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-10T12:01:57.772467Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:909:2773] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:01:57.772534Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-10T12:01:57.772576Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-10T12:01:57.772643Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-10T12:01:57.773042Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:910:2774] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:01:57.773089Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-10T12:01:57.773122Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-10T12:01:57.773153Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-10T12:01:57.784216Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [12:913:2777] connected 2026-01-10T12:01:57.784383Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-10T12:01:57.784438Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:911:2775] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 951 Coordinator# 72057594046316546 2026-01-10T12:01:57.784697Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-10T12:01:57.784750Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-10T12:01:57.784844Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:223: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:855:2740] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2026-01-10T12:01:57.784958Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:130: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:911:2775] 2026-01-10T12:01:57.785007Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2026-01-10T12:01:57.785080Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2026-01-10T12:01:57.785147Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2026-01-10T12:01:57.785202Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2026-01-10T12:01:57.785294Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-10T12:01:57.785373Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-10T12:01:57.797639Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:905:2771] ServerId: [12:909:2773] } 2026-01-10T12:01:57.815418Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:934:2787] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-10T12:01:57.815530Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-10T12:01:57.815592Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-10T12:01:57.815650Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-10T12:01:57.826701Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:01:57.863333Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:906:2772] ServerId: [12:910:2774] } 2026-01-10T12:01:57.879503Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:973:2803] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-10T12:01:57.879601Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-10T12:01:57.879648Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-10T12:01:57.879691Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/mediator/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection |82.6%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/solomon/py3test |82.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |82.6%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |82.6%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest |82.7%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |82.7%| [TM] {RESULT} ydb/tests/fq/solomon/py3test |82.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/add_column/py3test >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/add_column/py3test |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors 
|82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |82.7%| [TM] {RESULT} ydb/tests/datashard/add_column/py3test |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TNodeBrokerTest::UpdateEpochPipelining >> test.py::TestViewer::test_topic_data [GOOD] >> test.py::TestViewer::test_topic_data_cdc |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> test.py::TestViewer::test_topic_data_cdc [GOOD] >> test.py::TestViewer::test_async_replication_describe >> ColumnBuildTest::Locking_Failed >> ColumnBuildTest::BaseCase >> test.py::TestViewer::test_async_replication_describe [GOOD] >> test.py::TestViewer::test_transfer_describe >> ColumnBuildTest::Cancelling >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> 
test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test.py::TestViewer::test_transfer_describe [GOOD] >> test.py::TestViewer::test_viewer_query_long >> QuoterWithKesusTest::PrefetchCoefficient [FAIL] >> QuoterWithKesusTest::GetsQuotaAfterPause >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> ColumnBuildTest::DoNotRestoreDeletedRows >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> DescribeSchemaSecretsService::GroupGrants [GOOD] >> DescribeSchemaSecretsService::BatchRequest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> TMLPReaderTests::TopicWithBigMessage [GOOD] >> TMLPReaderTests::TopicWithKeepMessageOrder >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> ColumnBuildTest::Locking_Failed [GOOD] >> ColumnBuildTest::Locking_DisableFlag >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2026-01-10T12:02:09.260446Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012365s 2026-01-10T12:02:09.272519Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008419s 2026-01-10T12:02:09.308359Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008312s 2026-01-10T12:02:09.461382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:09.461457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:02:11.432696Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> TTxDataShardFilterKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test.py::TestViewer::test_viewer_query_long [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::Cancellation_Applying_DisableFlag >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TColumnShardTestReadWrite::CompactionGC >> ColumnBuildTest::Locking_DisableFlag [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart [GOOD] >> test.py::TestViewer::test_viewer_query_event_stream >> test.py::TestViewer::test_viewer_query_event_stream [GOOD] >> test.py::TestViewer::test_security >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> test.py::TestViewer::test_security [GOOD] >> test.py::TestViewer::test_storage_stats >> ColumnBuildTest::Filling_Failed |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Locking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:10.397870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2026-01-10T12:02:10.397987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:10.398028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:10.398069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:10.398112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:10.398143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:10.398237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:10.398306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:10.399186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:10.399496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:10.503585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:10.503669Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:10.527412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:10.527545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:10.527787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:10.540650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:10.540884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:10.541427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:10.541662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:10.544092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:10.544286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:10.545303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:10.545370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:10.545442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:10.545499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:10.545585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:10.545807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:10.695980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.696956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:10.697328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.697923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:19.201758Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-10T12:02:19.201791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:19.201829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-10T12:02:19.201903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1561:3429] message: TxId: 281474976730758 2026-01-10T12:02:19.201952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:19.201990Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-10T12:02:19.202033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-10T12:02:19.202112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:19.206760Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-10T12:02:19.206891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-10T12:02:19.206988Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-10T12:02:19.207131Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 
281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-10T12:02:19.216917Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-10T12:02:19.217112Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:19.217190Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:02:19.219845Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-10T12:02:19.220022Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 
281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:19.220097Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:19.220277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:19.220334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1752:3607] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:19.221098Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:19.221518Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:19.222575Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:19.222849Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 295us result status StatusSuccess 2026-01-10T12:02:19.223411Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { 
Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> test.py::TestViewer::test_storage_stats [GOOD] >> test.py::TestViewer::test_viewer_peers >> test.py::TestViewer::test_viewer_peers [GOOD] >> ColumnBuildTest::Rejecting >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> ColumnBuildTest::Initiating_Failed >> ColumnBuildTest::Cancelling [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag >> ColumnBuildTest::DoNotRestoreDeletedRows [GOOD] >> ColumnBuildTest::DisabledAndEnabledFlag |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> ColumnBuildTest::Applying_Failed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] Test command err: 2026-01-10T11:59:59.116824Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-10T11:59:59.116980Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-10T11:59:59.117036Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-10T11:59:59.117351Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-10T11:59:59.117380Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-10T11:59:59.117432Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-10T11:59:59.117570Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-10T11:59:59.121424Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-10T11:59:59.121479Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-10T11:59:59.121510Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-10T11:59:59.130254Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-10T11:59:59.130338Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-10T11:59:59.130403Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-10T11:59:59.130589Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.130631Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 
2026-01-10T11:59:59.130676Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.130707Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-10T11:59:59.130765Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.130796Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-10T11:59:59.130821Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-10T11:59:59.130915Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-10T11:59:59.130948Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-10T11:59:59.131073Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-10T11:59:59.131111Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2026-01-10T11:59:59.131173Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-10T11:59:59.131213Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2026-01-10T11:59:59.131251Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-10T11:59:59.131318Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-10T11:59:59.131349Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-10T11:59:59.131390Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-10T11:59:59.209018Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-10T11:59:59.209125Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-10T11:59:59.209173Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-10T11:59:59.209403Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: 
[my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-10T11:59:59.209432Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-10T11:59:59.209459Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-10T11:59:59.209521Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-10T11:59:59.209699Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-10T11:59:59.209726Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-10T11:59:59.209753Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-10T11:59:59.209893Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-10T11:59:59.209960Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-10T11:59:59.210016Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-10T11:59:59.210191Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.210223Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-10T11:59:59.210257Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.210353Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-10T11:59:59.210389Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.210416Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-10T11:59:59.210440Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-10T11:59:59.210512Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-10T11:59:59.210546Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges 
(egress) 2026-01-10T11:59:59.210681Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-10T11:59:59.210722Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2026-01-10T11:59:59.210758Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-10T11:59:59.210790Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2026-01-10T11:59:59.210830Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-10T11:59:59.210885Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-10T11:59:59.210921Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-10T11:59:59.210952Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-10T11:59:59.288688Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-10T11:59:59.288800Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-10T11:59:59.288838Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Wa ... 
-01-10T11:59:59.375028Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2026-01-10T11:59:59.375065Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.375094Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2026-01-10T11:59:59.375135Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-10T11:59:59.375186Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-10T11:59:59.375214Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-10T11:59:59.375306Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2026-01-10T11:59:59.375338Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2026-01-10T11:59:59.375370Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2026-01-10T11:59:59.375418Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2026-01-10T11:59:59.375447Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-10T11:59:59.375538Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2026-01-10T11:59:59.375574Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:3] Checkpoint completed 2026-01-10T11:59:59.375605Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-10T11:59:59.375652Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-10T11:59:59.375705Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2026-01-10T11:59:59.375732Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-10T11:59:59.375802Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.375830Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task 
state saved, need 2 more acks 2026-01-10T11:59:59.375876Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.375906Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2026-01-10T11:59:59.375943Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.375970Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2026-01-10T11:59:59.375997Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-10T11:59:59.376042Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-10T11:59:59.376106Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-10T11:59:59.376237Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2026-01-10T11:59:59.376270Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2026-01-10T11:59:59.376318Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2026-01-10T11:59:59.376351Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2026-01-10T11:59:59.376377Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-10T11:59:59.376429Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2026-01-10T11:59:59.376458Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:4] Checkpoint completed 2026-01-10T11:59:59.446081Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-10T11:59:59.446203Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-10T11:59:59.446253Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-10T11:59:59.446518Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-10T11:59:59.446542Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-10T11:59:59.446570Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-10T11:59:59.446625Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-10T11:59:59.446787Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-10T11:59:59.446819Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-10T11:59:59.446851Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-10T11:59:59.446977Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-10T11:59:59.447002Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-10T11:59:59.447046Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-10T11:59:59.447168Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-10T11:59:59.447196Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-10T11:59:59.447249Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-10T11:59:59.447273Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-10T11:59:59.447309Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-10T11:59:59.447339Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-10T11:59:59.447357Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2026-01-10T11:59:59.447408Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:590: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2026-01-10T11:59:59.447430Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:596: [my-graph-id.42] [42:1] Checkpoint aborted 2026-01-10T11:59:59.447464Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-10T11:59:59.447491Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-10T11:59:59.447537Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2026-01-10T11:59:59.447557Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-10T11:59:59.513388Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-10T11:59:59.513479Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 0, ActorsToNotify count: 1, ActorsToWaitFor count: 2 2026-01-10T11:59:59.513505Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:107: [my-graph-id.42] No ingress tasks, coordinator was disabled 2026-01-10T11:59:59.513527Z node 6 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/checkpointing/ut/unittest >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> ColumnBuildTest::Filling_Failed [GOOD] >> ColumnBuildTest::Filling_DisableFlag |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> ColumnBuildTest::AlterMainTable_Failed >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:10.380380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:10.380462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:10.380517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:10.380557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:10.380608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:10.380669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:10.380767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:10.380847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:10.381708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:10.382006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:10.535795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:10.535873Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:10.561475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:10.561621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:10.561824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:10.573177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:10.573430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:10.574225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:10.574471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:10.579178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:10.579371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:10.580613Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:10.580680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:10.580768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:10.580836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:10.580939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:10.581172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:10.733158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.734992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.735055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.735123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.735205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.735301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:10.735375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730757:0, at schemeshard: 72075186233409549 2026-01-10T12:02:26.061432Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730757:0 ProgressState 2026-01-10T12:02:26.061541Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-10T12:02:26.061588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:26.061634Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-10T12:02:26.061665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:26.061706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-10T12:02:26.061775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2608:4368] message: TxId: 281474976730757 2026-01-10T12:02:26.061824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:26.061862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-10T12:02:26.061895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-10T12:02:26.061973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-10T12:02:26.074514Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-10T12:02:26.074621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-10T12:02:26.074706Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730757 2026-01-10T12:02:26.074853Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-10T12:02:26.082870Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-10T12:02:26.083047Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:26.083155Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-10T12:02:26.086408Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-10T12:02:26.086595Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: 
[0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:26.086662Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-10T12:02:26.086830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:02:26.086885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2677:4426] TestWaitNotification: OK eventTxId 106 2026-01-10T12:02:26.087905Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-10T12:02:26.088283Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:26.089448Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:26.089743Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 339us result status StatusSuccess 2026-01-10T12:02:26.090312Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_Failed [GOOD] >> ColumnBuildTest::Initiating_DisableFlag >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::TooManyClusters >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:11.503953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:11.504041Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:11.504103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:11.504145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:11.504179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:11.504212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:11.504288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:11.504371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:11.505210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:11.505482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:11.597980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:11.598044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:11.618457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:11.618614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:11.618857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:11.638009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:11.638250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:11.639114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:11.639345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:11.641818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:11.641985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:11.643206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:11.643262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:02:11.643324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:11.643369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:11.643483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:11.643747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:11.838650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.839625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.839761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.839869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.839947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840337Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.840985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 88Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-10T12:02:27.157420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-01-10T12:02:27.157445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-10T12:02:27.157476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2026-01-10T12:02:27.157560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1676:3544] message: TxId: 281474976730761 2026-01-10T12:02:27.157602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-10T12:02:27.157632Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2026-01-10T12:02:27.157659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730761:0 2026-01-10T12:02:27.157729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:27.160207Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2026-01-10T12:02:27.160269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730761 2026-01-10T12:02:27.160326Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730761 2026-01-10T12:02:27.160427Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1738:3594], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, 
ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2026-01-10T12:02:27.161958Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-10T12:02:27.162076Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1738:3594], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:27.162133Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:02:27.163506Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-10T12:02:27.163622Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1738:3594], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:27.163658Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: 
: id# 105, subscribers count# 1 2026-01-10T12:02:27.163776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:27.163826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1755:3610] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:27.164476Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:27.164823Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-10T12:02:27.165544Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:27.165781Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 256us result status StatusSuccess 2026-01-10T12:02:27.166256Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 
MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> ColumnBuildTest::ValidDefaultValue |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns [GOOD] >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> ColumnBuildTest::Applying_Failed [GOOD] >> ColumnBuildTest::Applying_DisableFlag |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> ColumnBuildTest::Filling_DisableFlag [GOOD] >> ColumnBuildTest::AlterMainTable_Failed [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:11.189810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:11.189894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:11.189949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:11.189990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:11.190030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:11.190069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:11.190144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:11.190208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:11.191043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:11.191344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:11.324464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:11.324537Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:11.339127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:11.345200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:11.345405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:11.352444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:11.352834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:11.353540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:11.353817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:11.357316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:11.357505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:11.358730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:11.358791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-10T12:02:11.358899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:11.358942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:11.359041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:11.359302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:11.615874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.617623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.617803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.617871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.617977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618152Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:11.618854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730758:0, at schemeshard: 72075186233409549 2026-01-10T12:02:30.065540Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730758:0 ProgressState 2026-01-10T12:02:30.065634Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-10T12:02:30.065666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:30.065704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-10T12:02:30.065741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:30.065789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-10T12:02:30.065861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2462:4222] message: TxId: 281474976730758 2026-01-10T12:02:30.065910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:30.065940Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-10T12:02:30.065969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-10T12:02:30.066032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-10T12:02:30.073792Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-10T12:02:30.073883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-10T12:02:30.073952Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730758 2026-01-10T12:02:30.074083Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], 
IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-10T12:02:30.076229Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-10T12:02:30.076407Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:30.076508Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-10T12:02:30.083932Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-10T12:02:30.084117Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 
281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:30.084157Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-10T12:02:30.084325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:02:30.084384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2532:4281] TestWaitNotification: OK eventTxId 106 2026-01-10T12:02:30.085265Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-10T12:02:30.085555Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:30.087559Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:30.087806Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 266us result status StatusSuccess 2026-01-10T12:02:30.088311Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 
NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Initiating_DisableFlag [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |82.8%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest |82.8%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> ColumnBuildTest::Rejecting [GOOD] >> ColumnBuildTest::Unlocking_DisableFlag >> DescribeSchemaSecretsService::BatchRequest [GOOD] >> DescribeSchemaSecretsService::BigBatchRequest >> EntityId::Order >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] [GOOD] >> 
test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Filling_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:21.708511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:21.708626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:21.708679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:21.708719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:21.708759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:21.708788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:21.708846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:21.708918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:21.709764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:21.710085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:21.962890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:21.962964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:22.013750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:22.024799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:22.024996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:22.043983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:22.044218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:22.044967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:22.045429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:22.069694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:22.069902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:22.071234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:22.071299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:22.071408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:22.071455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:22.071559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:22.071805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:22.351034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352528Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.352955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.353021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.353097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.353201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.353382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:22.353480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 31.665466Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730759:0 ProgressState 2026-01-10T12:02:31.665557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-10T12:02:31.665588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-10T12:02:31.665628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-10T12:02:31.665659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-10T12:02:31.665696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730759, ready parts: 1/1, is published: true 2026-01-10T12:02:31.665766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1819:3686] message: TxId: 281474976730759 2026-01-10T12:02:31.665813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-10T12:02:31.665849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730759:0 2026-01-10T12:02:31.665883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730759:0 2026-01-10T12:02:31.665943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:31.675008Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730759 2026-01-10T12:02:31.675108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730759 2026-01-10T12:02:31.675177Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730759 2026-01-10T12:02:31.675334Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730759 2026-01-10T12:02:31.677461Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-10T12:02:31.677600Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:31.677687Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:02:31.679701Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Rejected 2026-01-10T12:02:31.679865Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:31.679908Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:31.680047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:31.680120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1967:3821] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:31.680791Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:31.681126Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:31.682069Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:31.682467Z 
node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 416us result status StatusSuccess 2026-01-10T12:02:31.682923Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:23.851785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:23.851882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:23.851925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:23.851963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:23.852001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:23.852031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:23.852131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:23.852206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:23.853055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:23.853326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:24.028999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:24.029049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:24.052743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:24.052848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:24.053029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:24.072219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:24.072498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:24.073210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:24.073460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:24.077135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:24.077336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:24.078509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:24.078569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:24.078749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:24.078807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:24.078901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:24.078997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:24.374295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.375967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.376051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.376165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.376256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:24.376324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1474976730758 ready parts: 1/1 2026-01-10T12:02:32.178247Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-10T12:02:32.178271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:32.178316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-10T12:02:32.178391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1764:3631] message: TxId: 281474976730758 2026-01-10T12:02:32.178435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-10T12:02:32.178469Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-10T12:02:32.178510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-10T12:02:32.178580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:32.184914Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-10T12:02:32.185001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-10T12:02:32.185079Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-10T12:02:32.185205Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, 
IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-10T12:02:32.187355Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-10T12:02:32.187520Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:32.187585Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:02:32.189734Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-10T12:02:32.189904Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, 
SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:32.189951Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:32.190123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:32.190200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1824:3679] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:32.191045Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:32.191378Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:32.192431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:32.192728Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 347us result status StatusSuccess 2026-01-10T12:02:32.194163Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> 
test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::ReadOnlyMode >> ColumnBuildTest::Applying_DisableFlag [GOOD] >> TExternalDataSourceTest::DropTableTwice |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:25.985126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:25.985243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:25.985286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:25.985324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:25.985361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:25.985389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:25.985458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:25.985537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:25.986370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:25.986677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:26.180534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:26.180595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:26.209267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:26.209376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:26.209573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:26.229048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:26.229267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:26.230010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:26.230244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:26.236264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:26.236446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:26.237552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:26.237609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:26.237668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:26.237715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:26.237807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:26.238074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:26.547484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556385Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.556930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:26.557555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:35.187642Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-10T12:02:35.187672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:35.187707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-10T12:02:35.187798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1842:3706] message: TxId: 281474976730757 2026-01-10T12:02:35.187867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:35.187908Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-10T12:02:35.187939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-10T12:02:35.188019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:35.192929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-10T12:02:35.193006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-10T12:02:35.193061Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-10T12:02:35.193161Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-10T12:02:35.196353Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-10T12:02:35.196570Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:35.196639Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:02:35.199253Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-10T12:02:35.199451Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:35.199517Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:35.199700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:35.199751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1900:3753] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:35.200546Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:35.200967Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-10T12:02:35.201914Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:35.202180Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 284us result status StatusSuccess 2026-01-10T12:02:35.202762Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TMLPReaderTests::TopicWithKeepMessageOrder [GOOD] >> TMLPWriterTests::TopicNotExists |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] Test command err: 2026-01-10T12:02:19.800217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:02:19.837288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:02:19.837607Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:02:19.845206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanSubColumnsPortions; 2026-01-10T12:02:19.845499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-10T12:02:19.845725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:02:19.845906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:02:19.846022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:02:19.846134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:02:19.846295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:02:19.846432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:02:19.846574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:02:19.846710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:02:19.846844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:02:19.846993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:02:19.847111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:02:19.847254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:02:19.896990Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:02:19.897225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanSubColumnsPortions; 2026-01-10T12:02:19.897281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:02:19.897624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2026-01-10T12:02:19.897753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2026-01-10T12:02:19.897845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2026-01-10T12:02:19.897951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2026-01-10T12:02:19.898140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanSubColumnsPortions;id=NO_VALUE_OPTIONAL; 2026-01-10T12:02:19.898224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-10T12:02:19.898272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-10T12:02:19.898443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:19.898596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:02:19.898659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:02:19.898707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-10T12:02:19.898808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:02:19.898899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:02:19.898942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:02:19.898978Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-10T12:02:19.899160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:19.899223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:02:19.899271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:02:19.899331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-10T12:02:19.899447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:02:19.899508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:02:19.899562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:02:19.899774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:02:19.899837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:02:19.899894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:02:19.899932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:02:19.899997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:02:19.900164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:02:19.900206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:02:19.900425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:02:19.900487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:02:19.900536Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:02:19.900716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:02:19.900761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:02:19.900790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstr ... 
};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:02:33.268001Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=138;finished=1; 2026-01-10T12:02:33.268039Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:02:33.268130Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:02:33.268557Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:02:33.268722Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:138;schema=id: uint64 json_payload: binary;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.268761Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:02:33.268881Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=138; 2026-01-10T12:02:33.268942Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=138;batch_columns=id,json_payload; 2026-01-10T12:02:33.269119Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:288:2289];bytes=283016;rows=4162;faults=0;finished=0;fault=0;schema=id: uint64 json_payload: binary; 2026-01-10T12:02:33.269304Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.269435Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.269611Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.269739Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:02:33.269834Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.269936Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-10T12:02:33.270337Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:290:2290] finished for tablet 9437184 2026-01-10T12:02:33.270947Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:288:2289];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack"],"t":0.056},{"events":["l_task_result"],"t":0.781},{"events":["l_ProduceResults","f_Finish"],"t":0.783},{"events":["l_ack","l_processing","l_Finish"],"t":0.784}],"full":{"a":13400510,"name":"_full_task","f":13400510,"d_finished":0,"c":0,"l":14184648,"d":784138},"events":[{"name":"bootstrap","f":13400808,"d_finished":6601,"c":1,"l":13407409,"d":6601},{"a":14183959,"name":"ack","f":13456877,"d_finished":291334,"c":251,"l":14183886,"d":292023},{"a":14183949,"name":"processing","f":13407706,"d_finished":682548,"c":503,"l":14183890,"d":683247},{"name":"ProduceResults","f":13401789,"d_finished":547933,"c":756,"l":14184194,"d":547933},{"a":14184202,"name":"Finish","f":14184202,"d_finished":0,"c":0,"l":14184648,"d":446},{"name":"task_result","f":13407733,"d_finished":383268,"c":252,"l":14182393,"d":383268}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.271036Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:288:2289];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:02:33.271621Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:288:2289];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack"],"t":0.056},{"events":["l_task_result"],"t":0.781},{"events":["l_ProduceResults","f_Finish"],"t":0.783},{"events":["l_ack","l_processing","l_Finish"],"t":0.784}],"full":{"a":13400510,"name":"_full_task","f":13400510,"d_finished":0,"c":0,"l":14185339,"d":784829},"events":[{"name":"bootstrap","f":13400808,"d_finished":6601,"c":1,"l":13407409,"d":6601},{"a":14183959,"name":"ack","f":13456877,"d_finished":291334,"c":251,"l":14183886,"d":292714},{"a":14183949,"name":"processing","f":13407706,"d_finished":682548,"c":503,"l":14183890,"d":683938},{"name":"ProduceResults","f":13401789,"d_finished":547933,"c":756,"l":14184194,"d":547933},{"a":14184202,"name":"Finish","f":14184202,"d_finished":0,"c":0,"l":14185339,"d":1137},{"name":"task_result","f":13407733,"d_finished":383268,"c":252,"l":14182393,"d":383268}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:02:33.271727Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:02:32.484752Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=1409432;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=1409432;selected_rows=0; 2026-01-10T12:02:33.271792Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:02:33.271977Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;; |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:27.785336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:27.785443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:27.785483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:27.785526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:27.785576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:27.785609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:27.785686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:27.785781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:27.786679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:27.786971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:27.884554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:27.884625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:27.905400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:27.905512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:27.905672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:27.914251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:27.914441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:27.914997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:27.915203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:27.917016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:27.917173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:27.918031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:27.918077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:27.918134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:27.918173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:27.918255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:27.918483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:28.135520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.137962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.138022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.138087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.138166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:28.138253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:28.138323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 57 ready parts: 1/1 2026-01-10T12:02:35.909048Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-10T12:02:35.909096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:35.909141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-10T12:02:35.909217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1710:3577] message: TxId: 281474976730757 2026-01-10T12:02:35.909270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-10T12:02:35.909322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-10T12:02:35.909358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-10T12:02:35.909460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:35.926751Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-10T12:02:35.926890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-10T12:02:35.926991Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-10T12:02:35.927152Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, 
AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-10T12:02:35.933363Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-10T12:02:35.933551Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:35.933629Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:02:35.943159Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-10T12:02:35.943350Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, 
DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:35.943389Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:35.943554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:35.943597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1765:3621] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:35.944390Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:35.944750Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-10T12:02:35.946609Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:35.946903Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 344us result status StatusSuccess 2026-01-10T12:02:35.947392Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |82.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> TTxDataShardLocalKMeansScan::TooManyClusters [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource |82.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/cloud/py3test >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::Unlocking_Failed >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:36.177048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:36.177190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.177247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:36.177290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:36.177334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:36.177374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:36.177436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.177512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:36.178572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.178932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:36.359711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:36.359821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:36.361493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.385008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:36.385682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:36.385932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:36.398279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:36.398611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:36.399502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:36.400072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:36.404005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.404271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:36.405745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:36.405824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.405888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:36.405942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:36.406051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:36.406305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:36.715373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.720467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.720777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.720877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.720952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.721774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.724813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.724996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
eshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:38.042772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:34: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:02:38.042921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:02:38.043126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:38.043194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:38.044687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:02:38.046857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:02:38.048320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:38.048377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:38.048593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:02:38.048699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:02:38.048827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.048871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:02:38.048934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:02:38.048967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:02:38.049417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:02:38.049492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:02:38.049633Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:02:38.049678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:38.049727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:02:38.049764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:38.049804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:02:38.049855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:38.049902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:02:38.049937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:02:38.050030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:02:38.050071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:02:38.050121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:02:38.050188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:02:38.051139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:38.051236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:38.051282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:02:38.051347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:02:38.051912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:38.053066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:38.053156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:38.053192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:02:38.053225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:02:38.053268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:38.053359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:02:38.059283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:02:38.059889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:02:38.060207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:02:38.060269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:02:38.060767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:02:38.060918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:02:38.060968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:731:2721] TestWaitNotification: OK eventTxId 102 2026-01-10T12:02:38.061587Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:38.061904Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 323us result status StatusSuccess 2026-01-10T12:02:38.062334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:23.377224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:23.377363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:23.377408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:23.377456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:23.377504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:23.377547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-10T12:02:23.377622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:23.377701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:23.378739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:23.379087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:23.478487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:23.478566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:23.514765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:23.514920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:23.515166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:23.536377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:23.536682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:23.537501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:23.537768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:23.544862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:23.545109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:23.546603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:23.546675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:23.546748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:23.546803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:23.546907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:23.547223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:23.737788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.738799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.738936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.739914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:23.740004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-10T12:02:37.946520Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725761:0 progress is 1/1 2026-01-10T12:02:37.946557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-10T12:02:37.946599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2026-01-10T12:02:37.946685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1892:3757] message: TxId: 281474976725761 2026-01-10T12:02:37.946740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-10T12:02:37.946780Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-10T12:02:37.946815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-10T12:02:37.946887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:02:37.952726Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-10T12:02:37.952852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-10T12:02:37.952947Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976725761 2026-01-10T12:02:37.953111Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-10T12:02:37.969417Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-10T12:02:37.969579Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:37.969645Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:02:37.972139Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-10T12:02:37.972317Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:37.972384Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-10T12:02:37.972563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:02:37.972625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1949:3803] TestWaitNotification: OK eventTxId 105 2026-01-10T12:02:37.973532Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-10T12:02:37.973943Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-10T12:02:37.974884Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:37.975179Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 286us result status StatusSuccess 2026-01-10T12:02:37.975695Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:36.346434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:36.346566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.346612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:36.346649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:36.346713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:36.346747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:36.346806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.346873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:36.352927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.353296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:36.586801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:36.586900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:36.587953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.630052Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:36.630594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:36.630765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:36.651791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:36.652066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:36.652830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:36.653273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:36.662886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.663097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:36.664535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:36.664606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.664660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:36.664742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:36.664850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:36.665055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:36.872106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.872926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:36.873433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.873750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
ngParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.414084Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.414262Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 193us result status StatusSuccess 2026-01-10T12:02:39.414512Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.415106Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.415254Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 168us result status StatusSuccess 2026-01-10T12:02:39.415623Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.416061Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2026-01-10T12:02:39.418675Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 2.59ms result status StatusSuccess 2026-01-10T12:02:39.419055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.419757Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.419991Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 259us result status StatusSuccess 2026-01-10T12:02:39.420429Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:37.583240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:37.583334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:37.583365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:37.583405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:37.583444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:37.583509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:37.583556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:37.583618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:37.593289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:37.593571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:37.773331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:37.773405Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:37.774913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:37.794403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:37.794804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:37.794970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:37.803173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:37.803451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:37.804225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:37.804650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:37.807753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:37.807937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:37.809136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:37.809193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:37.809240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:37.809281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:37.809383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:37.809616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:37.985702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.986804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.986945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:37.987978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:02:38.992184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:02:38.992620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:38.992753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:38.992812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:02:38.993159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:02:38.993280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:02:38.993479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:38.993571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:02:38.996771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:38.996832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:38.997129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.997178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:02:38.997470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:02:38.997521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:02:38.997630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:02:38.997709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:02:38.997755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#1:0 progress is 1/1 2026-01-10T12:02:38.997814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:02:38.997878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:02:38.997932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:02:38.997971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:02:38.998011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:02:38.998082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:38.998140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:02:38.998180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:02:38.998906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:02:38.999069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:02:38.999124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:02:38.999174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:02:38.999216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:38.999310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:02:39.003018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:02:39.003621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:02:39.004899Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:672:2662] Bootstrap 2026-01-10T12:02:39.007036Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:672:2662] Become StateWork (SchemeCache [1:677:2667]) 2026-01-10T12:02:39.010408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:39.010777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-10T12:02:39.010882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-10T12:02:39.010940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-10T12:02:39.012026Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:672:2662] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:02:39.022709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.022978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2026-01-10T12:02:39.023772Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:02:39.024048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:02:39.024114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:02:39.024583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:39.024683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:02:39.024737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:687:2677] TestWaitNotification: OK eventTxId 101 2026-01-10T12:02:39.025221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.025499Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 276us result status StatusPathDoesNotExist 2026-01-10T12:02:39.026020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:36.138577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:36.138689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.138737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:36.138772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:36.138823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:36.138859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:36.138913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:36.138980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:36.139908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.140250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:36.283107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:36.283217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:36.284190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:36.301849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:36.302470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:36.302675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:36.309819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:36.310048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:36.310611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:36.310979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:36.313702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.313929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:36.315266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:36.315343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:36.315399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:36.315488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:36.315610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:36.315854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:36.525614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.526759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.526898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.526970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.527946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:36.528027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
fCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:02:39.505735Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.505821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.505853Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:39.505886Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:02:39.505918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:02:39.506908Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.506986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.507016Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:39.507043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:02:39.507072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:39.507140Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:02:39.513979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:02:39.514507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:02:39.520689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:02:39.520886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:02:39.520921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:02:39.521221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:39.521302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:02:39.521331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:738:2727] TestWaitNotification: OK eventTxId 101 2026-01-10T12:02:39.521675Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.521826Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 183us result status StatusSuccess 2026-01-10T12:02:39.522065Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-10T12:02:39.525961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2026-01-10T12:02:39.526163Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2026-01-10T12:02:39.526266Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2026-01-10T12:02:39.532942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:39.533205Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:02:39.533523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:02:39.533573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:02:39.533965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:02:39.534062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:02:39.534101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:746:2735] TestWaitNotification: OK eventTxId 103 2026-01-10T12:02:39.534556Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.534747Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 217us result status StatusSuccess 2026-01-10T12:02:39.535121Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:38.481892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:38.482041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.482090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:38.482145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:38.482214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:38.482289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:38.482350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.482444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:38.483473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:38.483819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:38.585473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:38.585553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:38.609010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:38.609147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:38.609363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:38.622464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:38.622745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:38.623582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:38.623821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:38.626980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.627176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:38.628499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:38.628570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.628671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:38.628721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:38.628849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:38.629141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:38.815874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:02:38.816719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.816814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.816862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.816910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.816953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.817466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:02:39.639033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:02:39.639167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:02:39.639224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:02:39.639284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:02:39.639319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:02:39.639363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:02:39.639411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:02:39.639462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:02:39.639521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:02:39.639638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:39.639684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:02:39.639727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:02:39.639765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:02:39.641097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.641308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.641364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:39.641412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:02:39.641472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:39.642513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 
2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.642610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:39.642646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:39.642681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:02:39.642716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:02:39.642806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:02:39.647466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:02:39.647719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:02:39.647949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:02:39.647995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:02:39.648438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:39.648554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:02:39.648603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:710:2700] TestWaitNotification: OK eventTxId 101 2026-01-10T12:02:39.649209Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:39.649462Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 290us result status StatusSuccess 2026-01-10T12:02:39.649934Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:02:39.654269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:39.654628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-10T12:02:39.654741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:202: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-10T12:02:39.654892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2026-01-10T12:02:39.659550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:39.659918Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:02:39.660346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:02:39.660398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:02:39.660862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:02:39.660976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:02:39.661025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:718:2708] TestWaitNotification: OK eventTxId 102 >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TTicketParserTest::AuthorizationRetryError >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:38.660893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:38.660997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.661046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:38.661081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:38.661119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:38.661164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2026-01-10T12:02:38.661237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.661323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:38.662172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:38.662469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:38.787604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:38.787678Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:38.788661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:38.803403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:38.803921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:38.804129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:38.814553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:38.814832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:38.815530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:38.815998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:38.818972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.819172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:38.820417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:38.820484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.820538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:38.820581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:38.820674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-10T12:02:38.820885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:38.996244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.997995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.998059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.998151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.998304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.998393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
09Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2026-01-10T12:02:40.025414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-10T12:02:40.025625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2026-01-10T12:02:40.028860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:40.029162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-10T12:02:40.036748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:40.037150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-10T12:02:40.037225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-10T12:02:40.037366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-10T12:02:40.041062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:40.041384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-10T12:02:40.052885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:40.053466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-10T12:02:40.053619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-10T12:02:40.053815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-10T12:02:40.062623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:40.063017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-10T12:02:40.066476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:40.066756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2026-01-10T12:02:40.066857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2026-01-10T12:02:40.066978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-10T12:02:40.072291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:40.072604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 >> TExternalDataSourceTest::DropExternalDataSource [GOOD] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::TestViewer::test_viewer_peers [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/viewer/tests/py3test >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists >> TTicketParserTest::AuthenticationWithUserAccount >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:38.133377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:38.133467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.133502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:38.133535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:38.133575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:38.133605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:38.133652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:38.133726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:38.134634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:38.134898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:38.324962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:38.325040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:38.325877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:38.345495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:38.346031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:38.346248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:38.356284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:38.356571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners 
number: 0 2026-01-10T12:02:38.357263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:38.357698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:38.360476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.360688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:38.361882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:38.361956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:38.362009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:38.362068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:38.362170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:38.362362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:38.554712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.555825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.555977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:38.556980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... nByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-10T12:02:41.496382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:41.496547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:41.496611Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:02:41.496752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:41.496850Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:02:41.497046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:41.497122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:02:41.497993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:02:41.498095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:02:41.500383Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:41.500425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:41.500618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:02:41.500771Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:41.500806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:02:41.500844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:02:41.501117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:02:41.501160Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:02:41.501257Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:02:41.501281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:41.501326Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:02:41.501366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:41.501394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:02:41.501436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:02:41.501471Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:02:41.501498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:02:41.501569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:41.501599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:02:41.501627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 
2026-01-10T12:02:41.501650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:02:41.501988Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:41.502052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:41.502074Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:02:41.502106Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:02:41.502138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:02:41.502333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:02:41.502363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:02:41.502418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:41.502668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:41.502725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:02:41.502748Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:02:41.502768Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:02:41.502788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:41.502836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:02:41.507956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:02:41.510863Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:02:41.510989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:02:41.511150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:02:41.511192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:02:41.511506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:02:41.511600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:02:41.511649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:734:2723] TestWaitNotification: OK eventTxId 102 2026-01-10T12:02:41.512182Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:41.512362Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 226us result status StatusPathDoesNotExist 2026-01-10T12:02:41.512504Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TTicketParserTest::NebiusAuthenticationUnavailable >> ColumnBuildTest::Unlocking_Failed [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> 
TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:39.435504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:39.435619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:39.435663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:39.435703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:39.435746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:39.435774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:39.435858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:39.435926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:39.436892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:39.437181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:39.746103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:39.746179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:39.747158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:39.776816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:39.777367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:39.777562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:39.833352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:39.833693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:39.834379Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:39.834861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:39.840200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:39.840427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:39.841749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:39.841822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:39.841880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:39.841960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:39.842077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:39.842305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:40.031309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:40.032713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.032997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.033642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:42.823320Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:02:42.823363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:42.824663Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:42.824750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:02:42.824801Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:02:42.824834Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:02:42.824865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:42.824943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:02:42.828708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:02:42.829107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:02:42.829326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:02:42.829373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:02:42.829746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:42.829852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:02:42.829889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:712:2702] TestWaitNotification: OK eventTxId 101 2026-01-10T12:02:42.830323Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:42.830546Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 259us result status StatusSuccess 2026-01-10T12:02:42.830953Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId 
{ OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:02:42.834296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:02:42.834655Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2026-01-10T12:02:42.834755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2026-01-10T12:02:42.834894Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:02:42.838124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:02:42.838412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:02:42.838776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:02:42.838825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:02:42.839228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:02:42.839336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got 
EvNotifyTxCompletionResult 2026-01-10T12:02:42.839385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:720:2710] TestWaitNotification: OK eventTxId 102 2026-01-10T12:02:42.840069Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:42.840326Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 284us result status StatusSuccess 2026-01-10T12:02:42.840721Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:40.696490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:40.696596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:40.696649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:40.696691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:40.696743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:40.696783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:40.696851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:40.696905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:40.697707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:40.698010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:40.863130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:40.863461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:40.864604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:40.885484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:40.886072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:40.886299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:40.897537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:40.897859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:40.898791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:40.899260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:40.904802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, 
at schemeshard: 72057594046678944 2026-01-10T12:02:40.905062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:40.906596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:40.906665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:40.906726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:40.906777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:40.906893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:40.907177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:41.099987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.105704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.105934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:41.106164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.106996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:41.107077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... rRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-10T12:02:42.520049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:42.520260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:02:42.520408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-10T12:02:42.520577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:42.520691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:02:42.520918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:42.520990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:02:42.522378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:02:42.531717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:02:42.533007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:42.533180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, 
path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:42.533721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:02:42.534321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:42.534464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-10T12:02:42.534641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:02:42.544720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:02:42.544908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:02:42.545301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:02:42.545432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:02:42.545559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:02:42.545687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:02:42.545812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:02:42.545931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:02:42.546055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:02:42.546146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:02:42.546324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:02:42.546459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:02:42.546528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:02:42.546596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:02:42.547551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 
2026-01-10T12:02:42.547729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:02:42.547910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:02:42.548008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:02:42.555458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:02:42.556465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:02:42.556579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:02:42.556729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:02:42.557207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:02:42.557320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:02:42.557361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:02:42.557393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:02:42.557433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:02:42.557555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:02:42.572447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:02:42.572627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:02:42.572796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:02:42.573181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:02:42.573298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:02:42.574089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:02:42.574285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:02:42.574379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:794:2784] TestWaitNotification: OK eventTxId 104 2026-01-10T12:02:42.575344Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:42.575753Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 389us result status StatusPathDoesNotExist 2026-01-10T12:02:42.576015Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> DataShardSnapshots::MvccSnapshotTailCleanup >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |82.9%| [TM] {RESULT} ydb/core/viewer/tests/py3test |82.9%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> DataShardSnapshots::VolatileSnapshotSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:02:40.027675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:40.027766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:40.027802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:40.027837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:40.027876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:40.027908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:40.027982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:40.028039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:40.029021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:40.029334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:40.189057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:02:40.189129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:40.189998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:40.205186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:40.205789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:40.205988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:40.220833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:40.221125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:40.221850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:40.222269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:40.225210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:02:40.225409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:40.226614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:40.226682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:40.226729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:40.226784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:40.226878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:40.227079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:40.442327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.443446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.443588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.443688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.443749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:02:40.443812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.443881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.444020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.448297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.448460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.448556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.448646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.448940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.449212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:40.449303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... shard: 72057594046678944 2026-01-10T12:02:44.052883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.052905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.053041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.053187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.053247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.053341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.053467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.053639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.053756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-10T12:02:44.053824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.053850Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.053944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.053966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-10T12:02:44.054087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-10T12:02:44.054265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-10T12:02:44.054650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:02:44.054729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.054836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.054930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:02:44.055050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter 
[2:801:2791] 2026-01-10T12:02:44.055163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:02:44.055275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.055399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.055582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.055706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.055868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.055890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:801:2791] 2026-01-10T12:02:44.056026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:02:44.056057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:801:2791] TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-10T12:02:44.060293Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: 
false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:02:44.060597Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 527us result status StatusSuccess 2026-01-10T12:02:44.060996Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::MvccSnapshotAndSplit |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:02:30.523138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:02:30.523299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:30.523347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:02:30.523389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:02:30.523432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:02:30.523475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:02:30.523574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:02:30.523652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:02:30.524620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:02:30.524952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:02:30.619093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:02:30.619156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:30.644475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:02:30.644617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:02:30.644879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:02:30.659718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:02:30.659998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:02:30.660851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:02:30.661111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:02:30.673819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:30.674023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:02:30.675321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:02:30.675386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:02:30.675459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:02:30.675514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:02:30.675638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:02:30.676003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:02:30.882882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.883993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.884963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.885039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:02:30.885174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:02:30.885269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 3.624239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976725761, at schemeshard: 72075186233409549 2026-01-10T12:02:43.624324Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-10T12:02:43.624376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-10T12:02:43.624435Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2026-01-10T12:02:43.624546Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1662:3533], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-10T12:02:43.636179Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking 2026-01-10T12:02:43.636372Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 
72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1662:3533], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:43.636457Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:02:43.639988Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected 2026-01-10T12:02:43.640180Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1662:3533], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:02:43.640226Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-10T12:02:43.640426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:02:43.640477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1678:3549] TestWaitNotification: OK eventTxId 106 2026-01-10T12:02:43.641261Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-10T12:02:43.641679Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-10T12:02:43.642569Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:02:43.642825Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 279us result status StatusSuccess 2026-01-10T12:02:43.643314Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpFail::OnPrepare >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpWrite::Insert >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> KqpImmediateEffects::UpsertExistingKey >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> DescribeSchemaSecretsService::BigBatchRequest [GOOD] >> DescribeSchemaSecretsService::EmptyBatch >> KqpImmediateEffects::Delete >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> KqpInplaceUpdate::SingleRowStr-UseSink >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |83.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |83.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> TTicketParserTest::AuthenticationUnsupported >> KqpImmediateEffects::ConflictingKeyW1RR2 >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> TTicketParserTest::AccessServiceAuthenticationOk >> KqpInplaceUpdate::SingleRowIf-UseSink >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> TTicketParserTest::AuthorizationRetryError [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> TSelectFromViewTest::ReadTestCasesFromFiles >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TMLPWriterTests::TopicNotExists [GOOD] >> TMLPWriterTests::EmptyWrite >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |83.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |83.1%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpOverload::OltpOverloaded+Distributed >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> KqpFail::OnPrepare [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |83.1%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel 
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> KqpEffects::DeletePkPrefixWithIndex >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DescribeSchemaSecretsService::EmptyBatch [GOOD] >> DescribeSchemaSecretsService::MixedGrantsInBatch >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount |83.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |83.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 |83.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |83.1%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> 
test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> KqpWrite::InsertRevert [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] |83.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/common/py3test |83.1%| [TM] {RESULT} ydb/tests/fq/common/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14249, MsgBus: 5641 2026-01-10T12:02:47.045648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702185859702562:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:47.049902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:47.079798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:47.518727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:47.518810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:47.568030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:47.569254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:47.620935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:47.624256Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702181564735041:2081] 1768046566997326 != 1768046566997329 2026-01-10T12:02:47.809485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:47.833898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:47.833918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:47.833927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:47.833996Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:48.046211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:48.540044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:48.557718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:02:48.644499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:48.900468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.240578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.386972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:51.820554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702203039573398:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:51.820697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:51.821275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702203039573408:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:51.821320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.051565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702185859702562:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:52.051673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:52.314808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.367957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.410337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.470985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.515575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.607648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.657944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.729046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.876556Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702207334541581:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.876650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.877111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702207334541586:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.877151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702207334541587:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.877480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:52.881544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:52.905628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702207334541590:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:02:53.001172Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702207334541641:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... ypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:02:55.211471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 12254, MsgBus: 19057 2026-01-10T12:02:57.580300Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:57.580585Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:57.587523Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:57.587618Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:57.601508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:57.618998Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:57.624968Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702228219923543:2081] 1768046577312320 != 1768046577312323 2026-01-10T12:02:57.872274Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:57.880624Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:57.880648Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:57.880654Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:57.880726Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:58.397977Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:58.741904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:58.756465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:02:58.770963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.113164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.668864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.859914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.292130Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702253989729201:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:03.292208Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:03.292585Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702253989729211:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:03.292623Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:03.474557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.530805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.602258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.698650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.768271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:03.887183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:04.002670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:04.176786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:04.470261Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702258284697384:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:04.470388Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:04.470867Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702258284697389:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:04.470914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702258284697390:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:04.471019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:04.475334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:04.539192Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702258284697393:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:04.595257Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702258284697452:3783] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:08.565690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> TMLPWriterTests::EmptyWrite [GOOD] >> TMLPWriterTests::WriteOneMessage >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |83.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 15051, MsgBus: 15610 2026-01-10T12:02:48.244255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702190482372253:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:48.244324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:48.650182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.650263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:48.653554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:48.790606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:48.841451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:02:49.001369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:49.085524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:49.085554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:49.085579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:49.085691Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:49.263978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.081290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.087112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:50.178313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.403637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.721340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.815536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.245584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702190482372253:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:53.245656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:53.579792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7593702211957210567:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.579917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.580672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702211957210577:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.580730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.115757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.199947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.246145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.282137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.332497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.405036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.473553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.594101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.786871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702216252178756:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.786980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.787394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702216252178761:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.787467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702216252178762:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.787638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.791824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:54.805180Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702216252178765:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:02:54.893678Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702216252178816:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:02:57.336405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000994s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:03:11.807676Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:7593702288164687863:2543] TxId: 281474976710676. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:03:11.807838Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2026-01-10T12:03:11.808090Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:831} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-10T12:03:11.808284Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710677. Resolved key sets: 1 2026-01-10T12:03:11.808438Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710677. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 54] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-10T12:03:11.808679Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-10T12:03:11.808826Z node 2 :KQP_EXECUTER DEBUG: kqp_shards_resolver.cpp:76: [ShardsResolver] TxId: 281474976710677. Shard resolve complete, resolved shards: 1 2026-01-10T12:03:11.808864Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:259} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. 
Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-10T12:03:11.808897Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037927] trace_id# 2026-01-10T12:03:11.808941Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1768046591031} 2026-01-10T12:03:11.809169Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:7593702288164687874:2543] 2026-01-10T12:03:11.809204Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7593702288164687874:2543], channels: 1 2026-01-10T12:03:11.809265Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:03:11.809291Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7593702288164687874:2543], trace_id# 2026-01-10T12:03:11.809324Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7593702288164687874:2543], trace_id# 2026-01-10T12:03:11.809350Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:03:11.809901Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7593702288164687874:2543] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:03:11.809925Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7593702288164687874:2543], trace_id# 2026-01-10T12:03:11.809952Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7593702288164687874:2543], trace_id# 2026-01-10T12:03:11.811441Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7593702288164687874:2543] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 791 Tasks { TaskId: 1 CpuTimeUs: 220 FinishTimeMs: 1768046591810 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 135 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768046591810 CreateTimeMs: 1768046591809 UpdateTimeMs: 1768046591810 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:03:11.811521Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7593702288164687874:2543] 2026-01-10T12:03:11.811754Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:03:11.811790Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:7593702288164687870:2543] TxId: 281474976710677. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000791s ReadRows: 1 ReadBytes: 22 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:03:11.812580Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710678. Resolved key sets: 0 2026-01-10T12:03:11.812681Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710678. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1768046591031} 2026-01-10T12:03:11.812719Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:7593702288164687877:2543] TxId: 281474976710678. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:03:11.812772Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:7593702288164687877:2543] TxId: 281474976710678. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7593702288164687862:2543] trace_id# 2026-01-10T12:03:11.812814Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:7593702288164687877:2543] TxId: 281474976710678. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:03:11.815145Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:7593702288164687877:2543] TxId: 281474976710678. Ctx: { TraceId: 01kekwkzmt3d36yzdn02v0c6qh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjk1OWNkZGItZjdiNGQxMTAtM2RmMTU1ZWQtNDM4YTg5YTY=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 28898, MsgBus: 19480 2026-01-10T12:02:47.599669Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702184701935788:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:47.599917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:48.025483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.025592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:48.030505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:48.211812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:48.260052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702184701935614:2081] 1768046567544292 != 1768046567544295 2026-01-10T12:02:48.262600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:48.404237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:48.537023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:48.537075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:48.537084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:48.537168Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:48.592276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:49.301748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:49.314257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:49.382945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.619058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.909203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.048202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.600286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702184701935788:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:52.610291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:53.189790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702210471741273:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.189897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.190375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702210471741283:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.190425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.680360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.747544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.809049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.909459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.966804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.062758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.145154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.296989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.444384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214766709465:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.444476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.445316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214766709470:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.445366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214766709471:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.445420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.450875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:54.476201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702214766709474:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:02:54.567295Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702214766709527:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:02:56.771259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... p:440: Table profiles were not loaded 2026-01-10T12:02:59.531579Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702237101796442:2081] 1768046579397163 != 1768046579397166 2026-01-10T12:02:59.553942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:59.554021Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:59.565388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:59.704266Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:59.823738Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:59.823764Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:59.823776Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:59.823864Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:00.405602Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:00.488550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:00.499689Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:00.513684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.623277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.848766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.956619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:04.399590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593702237101796471:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:04.399682Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:05.027262Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702258576634801:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.027613Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.028628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702262871602107:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.028697Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.029085Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702262871602109:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.029129Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.099935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.154155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.203020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.247626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.307827Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.359893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.402316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.498720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:05.614014Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702262871602984:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.614124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.614624Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702262871602989:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.614662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702262871602990:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.614721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:05.618934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:05.633844Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702262871602993:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:05.727598Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702262871603044:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:08.713148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |83.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> KqpEffects::UpdateOn_Literal >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2026-01-10T12:02:44.614126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702174257177569:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:44.614386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:44.645577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:45.163140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:45.163628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:45.163718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:45.172885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:45.279853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702174257177344:2081] 1768046564563987 != 1768046564563990 2026-01-10T12:02:45.346895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:45.449114Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:45.463196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:45.463221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:45.463232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:45.463342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:45.562861Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:45.782124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:45.807907Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 3D8D4E0D91557FD0B94F6D67E31F516FE3E7621BAC4290D755D2E539AC6DDF10 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-10T12:02:50.018596Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702198125018460:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:50.018741Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:50.048526Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:50.256161Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:50.268005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:50.272299Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.272990Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:50.287981Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702193830050998:2081] 1768046569998111 != 1768046569998114 2026-01-10T12:02:50.288928Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.540024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:50.561026Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:50.561045Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:50.561053Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:50.561139Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:50.977733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.988492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:50.991812Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket CBF60352E8742CC701BA44331FEC987511FD9FC2305BC6C62333EC9637A3A193 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-10T12:02:51.044106Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:56.378919Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:56.379065Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:56.446600Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593702225289313214:2081] 1768046576129167 != 1768046576129170 2026-01-10T12:02:56.459394Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:56.470379Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:56.470464Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:56.488713Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:56.608436Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:56.630011Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:56.630036Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:56.630046Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:56.630125Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:56.886553Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:56.892999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:56.895792Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 260C212395DAFF369CC9846B053AA82806DC25BC5372DED4140B61E35F68D199 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-10T12:02:56.896586Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 260C212395DAFF369CC9846B053AA82806DC25BC5372DED4140B61E35F68D199: Cannot create token from certificate. Client certificate failed verification 2026-01-10T12:03:01.526611Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593702247226306233:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:01.526938Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:01.562052Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:01.760284Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593702247226306011:2081] 1768046581487965 != 1768046581487968 2026-01-10T12:03:01.784621Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:01.809179Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:01.809274Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:01.856263Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:01.876022Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:02.089028Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:02.089054Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:02.089064Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:02.089165Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:02.415394Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:02.476244Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:02.556559Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:02.565463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:02.575395Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 64CE21E85358D66196EFC6BC304288E8F982BECB49882D4B30722918C0BC4146 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-10T12:03:07.140352Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593702272816590905:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:07.140612Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:07.165828Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:07.303784Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:07.307911Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:07.319501Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593702272816590801:2081] 1768046587132355 != 1768046587132358 2026-01-10T12:03:07.328424Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:07.328520Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:07.352586Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:07.620905Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:07.621210Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:07.621219Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:07.621229Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:07.621326Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:07.936647Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:07.965910Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket FFF44801E2D41E1267FFDB00C819B76B0AC10B8538ACD341D31279198554C96F () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-10T12:03:07.966717Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket FFF44801E2D41E1267FFDB00C819B76B0AC10B8538ACD341D31279198554C96F: Cannot create token from certificate. Client certificate failed verification 2026-01-10T12:03:08.149049Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately >> KqpWrite::UpsertNullKey >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> KqpOverload::OltpOverloaded+Distributed [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229 folder_id=folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229 iam_token=usr_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229 cloud_account=acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229 2026-01-10T12:02:16.477624Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229]","tx_id":"281474976720697","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.643996Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud]","tx_id":"281474976720703","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.730755Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2]","tx_id":"281474976720704","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.803271Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Deduplication]","tx_id":"281474976720706","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE 
TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.803564Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/State]","tx_id":"281474976720710","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.803764Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Attributes]","tx_id":"281474976720705","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.803961Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Groups]","tx_id":"281474976720708","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.808988Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/SentTimestampIdx]","tx_id":"281474976720712","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.814287Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Messages]","tx_id":"281474976720711","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.814647Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Reads]","tx_id":"281474976720709","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:16.830387Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Data]","tx_id":"281474976720707","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:17.557999Z: {"request_id":"e3187512-23a781a3-dba23eb9-6420690c","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229"} ======================================== 2026-01-10T12:02:17.770718Z: 
{"request_id":"e3187512-23a781a3-dba23eb9-6420690c","permission":"ymq.queues.create","id":"13223073333604113391$CreateMessageQueue$2026-01-10T12:02:17.770517Z","idempotency_id":"13223073333604113391$CreateMessageQueue$2026-01-10T12:02:16.557000Z","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:16.557000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_34b247c1-ee1c-11f0-bb8a-d00d67ac8229.fifo","resource_id":"000000000000000103ud","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:19.427403Z: {"request_id":"68a0e590-b4f220dc-d07739dc-28cc5e22","permission":"ymq.queues.setAttributes","id":"2259791367374629496$UpdateMessageQueue$2026-01-10T12:02:19.427191Z","idempotency_id":"2259791367374629496$UpdateMessageQueue$2026-01-10T12:02:18.690000Z","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:18.690000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_34b247c1-ee1c-11f0-bb8a-d00d67ac8229.fifo","resource_id":"000000000000000103ud","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:20.026247Z: {"request_id":"ee7f4ccc-cd97e3e2-1e17e7d2-7f5a8c84","permission":"ymq.queues.setAttributes","id":"9755717644854613422$UpdateMessageQueue$2026-01-10T12:02:20.026057Z","idempotency_id":"9755717644854613422$UpdateMessageQueue$2026-01-10T12:02:19.880000Z","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:19.880000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_34b247c1-ee1c-11f0-bb8a-d00d67ac8229.fifo","resource_id":"000000000000000103ud","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:21.200007Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/SentTimestampIdx]","tx_id":"281474976720729","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:21.361840Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/State]","tx_id":"281474976720730","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:21.533932Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Reads]","tx_id":"281474976720731","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:21.685076Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Messages]","tx_id":"281474976720732","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:21.890964Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Groups]","tx_id":"281474976720733","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:22.040727Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229/000000000000000103ud/v2/Deduplication]","tx_id":"281474976720734","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:22.760355Z: {"request_id":"d7c915a9-b78b5197-c3394e04-67d47a84","permission":"ymq.queues.delete","id":"10327248016936954$DeleteMessageQueue$2026-01-10T12:02:22.760162Z","idempotency_id":"10327248016936954$DeleteMessageQueue$2026-01-10T12:02:21.030000Z","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:21.030000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_34b247c1-ee1c-11f0-bb8a-d00d67ac8229.fifo","resource_id":"000000000000000103ud","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:22.765572Z: {"request_id":"d7c915a9-b78b5197-c3394e04-67d47a84","permission":"ymq.queues.delete","id":"10327248016936954$DeleteMessageQueue$2026-01-10T12:02:22.765417Z","idempotency_id":"10327248016936954$DeleteMessageQueue$2026-01-10T12:02:21.030000Z","cloud_id":"CLOUD_FOR_folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:21.030000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_34b247c1-ee1c-11f0-bb8a-d00d67ac8229.fifo","resource_id":"000000000000000103ud","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_34e3c39e-ee1c-11f0-87e6-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:22.842514Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folde ... 
-ee1c-11f0-9350-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:39.817000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_404849c7-ee1c-11f0-b391-d00d67ac8229","resource_id":"00000000000000030520","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:40.999883Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/3/SentTimestampIdx]","tx_id":"281474976720810","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.101682Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/3/Infly]","tx_id":"281474976720811","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.230407Z: {"request_id":"2058d752-73c956a-3bd27687-20a03ccb","permission":"ymq.queues.delete","id":"14895176980376895634$DeleteMessageQueue$2026-01-10T12:02:41.230243Z","idempotency_id":"14895176980376895634$DeleteMessageQueue$2026-01-10T12:02:40.912000Z","cloud_id":"CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:02:40.912000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_404849c7-ee1c-11f0-b391-d00d67ac8229","resource_id":"00000000000000030520","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:02:41.263654Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/3/Messages]","tx_id":"281474976720812","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.371054Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/3/MessageData]","tx_id":"281474976720813","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.526976Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/2/SentTimestampIdx]","tx_id":"281474976720814","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.745187Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/2/Infly]","tx_id":"281474976720815","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:41.896434Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/2/Messages]","tx_id":"281474976720817","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.024667Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/2/MessageData]","tx_id":"281474976720818","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.189416Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/1/SentTimestampIdx]","tx_id":"281474976720819","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.350257Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/1/Infly]","tx_id":"281474976720821","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.492781Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/1/Messages]","tx_id":"281474976720822","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.617723Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/1/MessageData]","tx_id":"281474976720823","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.784304Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/0/SentTimestampIdx]","tx_id":"281474976720824","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:42.982321Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/0/Infly]","tx_id":"281474976720825","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.097587Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/0/Messages]","tx_id":"281474976720826","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.259966Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/0/MessageData]","tx_id":"281474976720827","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.482159Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/State]","tx_id":"281474976720828","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.617270Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/Attributes]","tx_id":"281474976720829","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.814292Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/3]","tx_id":"281474976720830","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.888334Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/2]","tx_id":"281474976720831","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:43.972561Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/1]","tx_id":"281474976720833","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:44.047926Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4/0]","tx_id":"281474976720834","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:44.118654Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520/v4]","tx_id":"281474976720835","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:44.197413Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229/00000000000000030520]","tx_id":"281474976720836","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:02:44.249393Z: {"request_id":"2058d752-73c956a-3bd27687-20a03ccb","cloud_id":"CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229","subject":"fake_user_sid@as","queue":"00000000000000030520","resource_id":"00000000000000030520","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_406a5048-ee1c-11f0-9350-d00d67ac8229"} ======================================== ======================================== |83.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2026-01-10T12:02:43.252668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702170126546184:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:43.252760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:43.789523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:43.820667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:43.822923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:43.889783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:44.052530Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702170126545954:2081] 1768046563131759 != 1768046563131762 2026-01-10T12:02:44.071041Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:44.101758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:44.144061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:44.280781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:44.280816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:44.280830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:44.280934Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:44.646471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:44.653645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:02:44.658585Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:02:44.658728Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d14706f15d0] Connect to grpc://localhost:5865 2026-01-10T12:02:44.663004Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14706f15d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-10T12:02:44.682620Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14706f15d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:02:44.683234Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-10T12:02:44.687973Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d14706f1cd0] Connect to grpc://localhost:14908 2026-01-10T12:02:44.689173Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14706f1cd0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-10T12:02:44.716362Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14706f1cd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-10T12:02:44.717131Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-10T12:02:48.592286Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:48.592470Z node 2 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:48.601416Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:48.605254Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702187606258298:2081] 1768046568342284 != 1768046568342287 2026-01-10T12:02:48.638787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.638872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:48.657355Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:48.904811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:48.908956Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:48.908979Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:48.908987Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:48.909067Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:49.223384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:49.248880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:02:49.256417Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:02:49.256502Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d14707e1c50] Connect to grpc://localhost:18234 2026-01-10T12:02:49.257592Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14707e1c50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-10T12:02:49.285086Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d14707e1c50] Status 14 Service Unavailable 2026-01-10T12:02:49.288252Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:49.288295Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:02:49.288449Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14707e1c50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-10T12:02:49.298595Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d14707e1c50] Status 1 CANCELLED 2026-01-10T12:02:49.300555Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** 
(8E120919) () has now retryable error message 'CANCELLED' 2026-01-10T12:02:49.418195Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:53.074210Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593702212731825914:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:53.074271Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:53.130829Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:53.320917Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:53.321079Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:53.321691Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:53.324474Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:53.464474Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:02:53.464531Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:02:53.479810Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:53.494063Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:53.494123Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:53.494136Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:53.494228Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:53.593359Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:54.117126Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:54.129789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:54.145188Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (8E120919): Token is not supported 2026-01-10T12:02:58.605455Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:58.605626Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... 0] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:04.224244Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d147075c8d0] Status 16 Access Denied 2026-01-10T12:03:04.224518Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:04.224544Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2026-01-10T12:03:04.225178Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:04.225366Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:04.227394Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d147075c8d0] Status 16 Access Denied 2026-01-10T12:03:04.227720Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:04.227751Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2026-01-10T12:03:04.228411Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:04.228591Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-10T12:03:04.232038Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d147075c8d0] Status 16 Access Denied 2026-01-10T12:03:04.239249Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:04.239299Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-10T12:03:04.240131Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:04.240334Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:04.242546Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d147075c8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:04.246266Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission 
something.read now has a valid subject "user1@as" 2026-01-10T12:03:04.246400Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:04.247113Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:04.247376Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-10T12:03:04.313413Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d147075c8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:04.313618Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-10T12:03:04.313703Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:04.328852Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2026-01-10T12:03:04.329100Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2026-01-10T12:03:04.332224Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d147075c8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:04.332616Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2026-01-10T12:03:04.332694Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:04.335627Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (6968D2E8) asking for AccessServiceAuthorization(something.write) 2026-01-10T12:03:04.335861Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d147075c8d0] Request AuthorizeRequest { iam_token: "**** (6968D2E8)" permission: "something.write" resource_path { id: "123" type: "ydb.database" } resource_path { id: "folder" type: "resource-manager.folder" } } 2026-01-10T12:03:04.337841Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d147075c8d0] Response AuthorizeResponse { subject { service_account { id: "service1" } } } 2026-01-10T12:03:04.338133Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (6968D2E8) permission something.write now has a valid subject "service1@as" 2026-01-10T12:03:04.338222Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-10T12:03:04.372250Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:09.964553Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:09.964697Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:10.108661Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593702281715080770:2081] 1768046589841863 != 1768046589841866 2026-01-10T12:03:10.169993Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:10.187013Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:10.187107Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:10.227502Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:10.352984Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:10.428781Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:10.428812Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:10.428821Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:10.428911Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:10.774197Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:10.785318Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:10.791402Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:10.791525Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d14707a8050] Connect to grpc://localhost:1145 2026-01-10T12:03:10.792531Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14707a8050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:10.814845Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14707a8050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:10.815213Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-10T12:03:10.815319Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:10.816016Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:10.816089Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) 
asking for AccessServiceAuthorization(something.write) 2026-01-10T12:03:10.816249Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14707a8050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:10.816871Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14707a8050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:10.819142Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14707a8050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:10.819300Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14707a8050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:10.819502Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-10T12:03:10.819547Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-10T12:03:10.819636Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:10.872276Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::AuthorizationUnavailable |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |83.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TestProtocols::TestResolveProtocol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpOverload::OltpOverloaded+Distributed [GOOD] Test command err: Trying to start YDB, gRPC: 25595, MsgBus: 29992 2026-01-10T12:02:49.585497Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702194378095344:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:49.585673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:49.678975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:50.135198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:50.143391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:50.143481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.149197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.242207Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702194378095132:2081] 1768046569550001 != 1768046569550004 2026-01-10T12:02:50.249282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:50.320007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:50.448855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:50.448879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:50.448888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:50.448981Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:50.567874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:51.272906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:51.296489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:51.474914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:51.742547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:51.979629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:02:52.124112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.575792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702194378095344:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:54.575856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:55.260982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702220147900799:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.261115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.261620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702220147900809:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.261680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.886396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:55.932560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:55.988601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.030366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.080744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.149442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.285025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.389023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.520640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702224442868980:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:56.520750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:56.523268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702224442868985:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:56.523317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702224442868986:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:56.523632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:56.528034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:56.542450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702224442868989:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:02:56.606794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702224442869044:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 6265, MsgBus: 14307 ... waiting for SysViewsRoster update finished 2026-01-10T12:03:05.582906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:05.593912Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:03:05.594251Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:05.594311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:03:05.940963Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:05.941145Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:05.983560Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046581093879 != 1768046581093883 2026-01-10T12:03:05.994132Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:06.045019Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:06.183544Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:03:06.812865Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:06.812932Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:06.812971Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:06.813407Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:06.828992Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:07.812127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:07.918241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:08.393311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:08.995250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:09.463696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.684894Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1913:3519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.685340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.686509Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1987:3538], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.686633Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.751823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:11.031361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:11.316685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:11.658053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:11.962875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:12.339779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:12.710806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:13.198257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:13.787973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2801:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:13.788173Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:13.788711Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2805:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:13.788797Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:13.788873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2808:4189], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:13.796451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:13.965871Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2810:4191], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:14.067146Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2867:4229] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=281474976715675; |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DescribeSchemaSecretsService::MixedGrantsInBatch [GOOD] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61850, MsgBus: 14801 2026-01-10T12:02:54.296432Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702217330890326:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:54.296502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:54.885935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:54.886142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:55.001614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:55.051674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:55.064563Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702217330890083:2081] 1768046574206581 != 1768046574206584 2026-01-10T12:02:55.077394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:55.296688Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:55.302189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:55.322809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:55.322828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:55.322835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:55.322903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:56.069175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:56.196813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.515621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.798349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.916749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.095375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702238805728437:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.095588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.100282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702238805728447:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.100402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.296299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702217330890326:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:59.296392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:59.620657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.761453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.816778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.859498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.910301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.021607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.151774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.315435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.446091Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702243100696620:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.446205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.446687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702243100696625:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.446731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702243100696626:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.446852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.451224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:00.477623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:03:00.482279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702243100696629:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:00.581929Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702243100696680:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:03.891039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:06.997646Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:07.390889Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:07.394066Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:07.403534Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702266317744148:2081] 1768046586930472 != 1768046586930475 2026-01-10T12:03:07.453821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:07.453912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:07.631768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:07.712403Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:07.990701Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:07.999114Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:07.999136Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:07.999143Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:07.999218Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:09.085857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:09.100722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:09.111968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:09.239712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:09.568977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:09.663338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:11.935379Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593702266317744183:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:11.935473Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:14.309399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702300677484415:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:14.309478Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:14.309872Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702300677484425:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:14.309912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:14.514771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.585468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.656551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.718914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.801728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.928807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:15.039440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:15.218989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:15.353215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702304972452616:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:15.353306Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:15.353614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702304972452621:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:15.353630Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702304972452622:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:15.353671Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:15.358536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:15.380847Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702304972452625:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:15.476648Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702304972452676:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:18.625649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TInterconnectTest::TestConnectAndDisconnect >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TestProtocols::TestHTTPRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2026-01-10T12:02:43.380528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702170131869449:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:43.432537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:43.504702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:43.922402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:43.937304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:43.937422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:44.076502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:44.077015Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7593702170131869337:2081] 1768046563332954 != 1768046563332957 2026-01-10T12:02:44.097432Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:44.287265Z node 1 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-10T12:02:44.287367Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-10T12:02:44.316204Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-10T12:02:44.316262Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-10T12:02:44.316279Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-10T12:02:44.325206Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-10T12:02:44.325384Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-10T12:02:44.332269Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:02:44.332395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:44.332451Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4bd0bf07d0] Connect to grpc://localhost:28170 2026-01-10T12:02:44.379283Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0bf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:02:44.433048Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:44.433093Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-10T12:02:44.528182Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4bd0bf07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:02:44.528631Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:02:48.351819Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:48.355994Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:48.538443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:48.584725Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:48.592210Z node 2 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702191148478197:2081] 1768046568199278 != 1768046568199281 2026-01-10T12:02:48.673519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.673632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:48.695726Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:48.820992Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-10T12:02:48.821213Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:02:48.821235Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-10T12:02:48.821250Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-10T12:02:48.821275Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-10T12:02:48.821292Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-10T12:02:48.821350Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4bd0c23050] Connect to grpc://localhost:15269 2026-01-10T12:02:48.822271Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c23050] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:02:48.879465Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-10T12:02:48.902084Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c23050] Status 16 Unauthenticated service 2026-01-10T12:02:48.902759Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-10T12:02:48.902796Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-01-10T12:02:48.902855Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-10T12:02:48.902917Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:02:48.903164Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c23050] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:02:48.905804Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4bd0c23050] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:02:48.906135Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 
2026-01-10T12:02:48.906236Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:49.279025Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:49.283535Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-10T12:02:50.288256Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-10T12:02:51.296202Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-10T12:02:51.296253Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:70: Refresh token for provider# token-for-access-service 2026-01-10T12:02:51.296335Z node 2 :TOKEN_MANAGER TRACE: vm_metadata_token_provider_handler.cpp:25: Handle send request to vm metaservice 2026-01-10T12:02:51.300253Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-10T12:02:51.300435Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-10T12:02:51.300484Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:02:51.300502Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-10T12:02:51.300513Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-10T12:02:51.300546Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-10T12:02:51.300560Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-10T12:02:51.300866Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c23050] Request BulkAuthorizeRequest { iam_token: "**** (9D42FAED)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:02:51.301495Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# new-****ount (82D66F55) 2026-01-10T12:02:51.304422Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c23050] Status 16 Unauthenticated service 2026-01-10T12:02:51.306391Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (9D42FAED) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-10T12:02:51.306438Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (9D42FAED) () has now retryable error message 'Unauthenticated service' 2026-01-10T12:02:51.306477Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: new-****ount (82D66F55) 2026-01-10T12:02:51.306521Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:02:51.306817Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c230 ... 
led at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:58.045165Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:02:58.052269Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2026-01-10T12:02:58.052333Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4bd0cc12d0] Connect to grpc://localhost:26344 2026-01-10T12:02:58.053117Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0cc12d0] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2026-01-10T12:02:58.069447Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4bd0cc12d0] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2026-01-10T12:02:58.072398Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2026-01-10T12:03:03.479654Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:03.479748Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:03.564519Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:03.566407Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593702252581311023:2081] 1768046583425895 != 1768046583425898 2026-01-10T12:03:03.582427Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:03.582527Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:03.616978Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:03.730131Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:03.803661Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:03.803688Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:03.803703Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:03.803788Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:04.063263Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:04.072427Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:04.076213Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:04.076292Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4bd0c41350] Connect to grpc://localhost:9481 2026-01-10T12:03:04.077648Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c41350] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:04.099103Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c41350] Status 14 Service Unavailable 2026-01-10T12:03:04.100215Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:04.100265Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:04.100489Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c41350] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:04.106039Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c41350] Status 14 Service Unavailable 2026-01-10T12:03:04.106188Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:04.456240Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:04.470817Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:03:04.470880Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:04.471101Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c41350] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:04.478328Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c41350] Status 14 Service Unavailable 2026-01-10T12:03:04.480337Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:05.470130Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:03:05.470158Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:05.470315Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c41350] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:05.472648Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c41350] Status 14 Service Unavailable 2026-01-10T12:03:05.472936Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:07.477555Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:03:07.477611Z node 5 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:07.477895Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c41350] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:07.486416Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4bd0c41350] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:07.486825Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-10T12:03:18.340632Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593702319805270384:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:18.340674Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:18.489333Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:18.647330Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:18.652302Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593702319805270357:2081] 1768046598315688 != 1768046598315691 2026-01-10T12:03:18.675734Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:18.675827Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:18.731042Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:18.812417Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:18.991436Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:18.991464Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:18.991472Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:18.991564Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:19.380762Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:19.570075Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:19.583751Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for 
AccessServiceAuthentication 2026-01-10T12:03:19.583826Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4bd0c2d150] Connect to grpc://localhost:63246 2026-01-10T12:03:19.584757Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c2d150] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:19.600352Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4bd0c2d150] Status 14 Service Unavailable 2026-01-10T12:03:19.600873Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:19.600904Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-10T12:03:19.601101Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4bd0c2d150] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-10T12:03:19.607374Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4bd0c2d150] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:19.607708Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2026-01-10T12:02:19.405453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:02:19.463007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:02:19.463306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:02:19.470972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:02:19.471236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:02:19.471434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:02:19.471520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:02:19.471614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:02:19.471747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:02:19.471845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:02:19.471934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:02:19.472002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:02:19.472117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:02:19.472262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:02:19.472339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:02:19.472437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:02:19.517495Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:02:19.518813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:02:19.518929Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:02:19.519141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:19.519309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:02:19.519417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:02:19.519470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:02:19.519584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:02:19.519657Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:02:19.519712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:02:19.519745Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:02:19.519935Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:19.520002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:02:19.520091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:02:19.520167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:02:19.520277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:02:19.520356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:02:19.520419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:02:19.520450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:02:19.520499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:02:19.520536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:02:19.520569Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:02:19.520613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:02:19.520652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:02:19.520681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:02:19.520921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:02:19.521091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:02:19.521146Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:02:19.521302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:02:19.521413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:02:19.521450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:02:19.521516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:02:19.521576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:02:19.521609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:02:19.521660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:02:19.521700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:02:19.521734Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:02:19.521956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:02:19.522091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-10T12:03:17.488031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1091; 2026-01-10T12:03:17.488114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=87287; 2026-01-10T12:03:17.488187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=87428; 2026-01-10T12:03:17.488264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-10T12:03:17.488729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=412; 2026-01-10T12:03:17.488778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=88604; 2026-01-10T12:03:17.488971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=130; 2026-01-10T12:03:17.489111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2026-01-10T12:03:17.489671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=505; 2026-01-10T12:03:17.490278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=527; 2026-01-10T12:03:17.507737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17357; 2026-01-10T12:03:17.524187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16304; 2026-01-10T12:03:17.524321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-10T12:03:17.524380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-10T12:03:17.524422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:03:17.524496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2026-01-10T12:03:17.524535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:03:17.524621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2026-01-10T12:03:17.524668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:03:17.524728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-10T12:03:17.524834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=69; 2026-01-10T12:03:17.524915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2026-01-10T12:03:17.524961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=137124; 2026-01-10T12:03:17.525097Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:03:17.525207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:03:17.525260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:03:17.525324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:03:17.525363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:03:17.525549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:03:17.525604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:03:17.525637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:03:17.525675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:03:17.525734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768046241861;tx_id=18446744073709551615;;current_snapshot_ts=1768046540874; 2026-01-10T12:03:17.525771Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:03:17.525808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:17.525840Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:17.525923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:03:17.526112Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.033000s; 2026-01-10T12:03:17.536766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:03:17.537352Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:03:17.537443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:03:17.537523Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:03:17.537577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:03:17.537644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768046241861;tx_id=18446744073709551615;;current_snapshot_ts=1768046540874; 2026-01-10T12:03:17.537692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:03:17.537741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:17.537781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:17.537859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:03:17.537929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:03:17.538813Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.075000s; 2026-01-10T12:03:17.538869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> TInterconnectTest::TestBlobEvent220BytesPreSerialized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 16088, MsgBus: 19529 2026-01-10T12:02:49.012192Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702193405826212:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:49.012259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:49.434225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:49.434300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:49.434319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:49.507207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:49.640260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702189110858706:2081] 1768046568985105 != 1768046568985108 2026-01-10T12:02:49.690171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:49.724546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:49.836374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:49.836402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:49.836413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:49.836503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:50.015042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.428947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.442363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:50.511515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.682334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.858018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.957126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.337236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702210585697055:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.337361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.338234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702210585697065:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.338324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:53.729337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.773618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.819644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.861181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.897844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.995114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.015784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702193405826212:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:54.016023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:54.055599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.136049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:54.303396Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214880665232:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.303497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.304053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214880665237:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.304122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702214880665238:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.304411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:54.310671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:54.352351Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702214880665241:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:02:54.435010Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702214880665292:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:02:56.872764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... ize from file: (empty maybe) 2026-01-10T12:03:12.612603Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:12.612695Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:13.247120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:13.261091Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:13.261414Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:13.269348Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:13.425838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:13.846406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:14.062754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:16.913518Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702309677844344:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:16.913618Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:16.914131Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702309677844354:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:16.914186Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:17.228210Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593702292497973350:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:17.228316Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:17.247502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.398279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.499564Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.587907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.690313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.801569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:17.902439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:18.086416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:18.323388Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702318267779824:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:18.323483Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:18.323912Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702318267779829:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:18.323957Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702318267779830:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:18.324202Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:18.332413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:18.357245Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702318267779833:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:18.448066Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702318267779886:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:21.919834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.799321Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7593702335447649671:2543], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01kekwmab4e0g8qd0kn52z4dy9. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZWY5ODEyY2EtNDQwYTNmZDgtMjhjZDFlZDctYzk5YTM5ZTg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-10T12:03:22.802644Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593702335447649671:2543], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01kekwmab4e0g8qd0kn52z4dy9. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZWY5ODEyY2EtNDQwYTNmZDgtMjhjZDFlZDctYzk5YTM5ZTg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-10T12:03:22.806514Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWY5ODEyY2EtNDQwYTNmZDgtMjhjZDFlZDctYzk5YTM5ZTg=, ActorId: [3:7593702331152682102:2543], ActorState: ExecuteState, LegacyTraceId: 01kekwmab4e0g8qd0kn52z4dy9, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> TProxyActorTest::TestDisconnectWhileAttaching >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::UpdateOn_Params >> KqpEffects::AlterDuringUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> 
TOlapNaming::CreateColumnStoreFailed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27308, MsgBus: 30543 2026-01-10T12:02:59.843650Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702235507746595:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:59.843718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:59.887175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:00.474159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:00.608399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:00.608480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:00.622837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:00.854490Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:00.862997Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:00.872216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702235507746362:2081] 1768046579810540 != 1768046579810543 2026-01-10T12:03:00.938066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:01.099914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:01.099934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:01.099940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:01.100013Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:02.096501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:02.174940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:02.401191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:02.687172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:02.832362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:04.845941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702235507746595:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:04.846036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:06.413291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702265572519333:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:06.413417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:06.420273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702265572519343:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:06.420380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:06.981600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.098060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.209205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.281110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.372741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.473274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.567757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.709471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:07.928820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702269867487531:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:07.928916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:07.929435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702269867487536:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:07.929478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702269867487537:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:07.929506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:07.934242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:07.955989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-10T12:03:07.957361Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702269867487540:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:08.056058Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702274162454887:3790] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:14.329634Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:14.620218Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:14.653777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:14.653850Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:14.656481Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702299356428991:2081] 1768046594263613 != 1768046594263616 2026-01-10T12:03:14.672029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:14.682710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:14.848427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:14.848451Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:14.848457Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:14.848527Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:14.888071Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:15.312357Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:15.890611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:15.899771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:15.910908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:16.050991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:16.271019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:16.454686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:19.278616Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593702299356429020:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:19.281583Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:21.513410Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702329421201975:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:21.513488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:21.514282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702329421201985:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:21.514333Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:21.795530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:21.888095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:21.938251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:21.993847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.046627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.111447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.192642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.268910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.445970Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702333716170177:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:22.446081Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:22.449878Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702333716170182:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:22.449994Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702333716170183:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:22.450165Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:22.455397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:22.472914Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702333716170186:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:22.535656Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702333716170239:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:24.600443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR cookie 0 ... waiting for blocked registrations (done) 2026-01-10T12:03:27.685410Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TOlap::CreateTableWithNullableKeysNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2026-01-10T12:02:44.288035Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702170361727963:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:44.292512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:44.954923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:44.955045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:45.018870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:45.129981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:45.140507Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702170361727772:2081] 1768046564257358 != 1768046564257361 2026-01-10T12:02:45.143036Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:45.264359Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:45.367848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:45.404701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:45.404723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:45.404734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:45.404808Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:45.759254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:45.772691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:45.778445Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:02:45.778527Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7330200050] Connect to grpc://localhost:2287 2026-01-10T12:02:45.780864Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330200050] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-10T12:02:45.802378Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c7330200050] Status 14 Service Unavailable 2026-01-10T12:02:45.802807Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:45.802839Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:02:45.802981Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330200050] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-10T12:02:45.810564Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c7330200050] Status 1 CANCELLED 2026-01-10T12:02:45.812014Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-10T12:02:49.654217Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702195775230483:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:49.654287Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:49.698547Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:49.770659Z node 2 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:49.944125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:49.953641Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:49.956257Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702195775230351:2081] 1768046569598888 != 1768046569598891 2026-01-10T12:02:49.972748Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:49.972821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.025272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.329557Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:50.329586Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:50.329592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:50.329668Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:50.332167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:50.668389Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.683643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.693238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:50.697782Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-10T12:02:50.697851Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c73302a21d0] Connect to grpc://localhost:12729 2026-01-10T12:02:50.699209Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c73302a21d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } 
NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-10T12:02:50.720250Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c73302a21d0] Status 14 Service Unavailable 2026-01-10T12:02:50.720725Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:02:50.720759Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:50.720816Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-10T12:02:50.721108Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c73302a21d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-10T12:02:50.729571Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c73302a21d0] Status 14 Service Unavailable 2026-01-10T12:02:50.729900Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:02:50.729925Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:51.663866Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-10T12:02:51.663955Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-10T12:02:51.664420Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c73302a21d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-10T12:02:51.667132Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c73302a21d0] Status 14 Service Unavailable 2026-01-10T12:02:51.667411Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:02:51.667442Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:53.671670Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl. ... 
: [7c7330201550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-10T12:03:14.194423Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330201550] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-10T12:03:14.194679Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:14.195331Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (6968D2E8) asking for AccessServiceAuthorization( something.write) 2026-01-10T12:03:14.195538Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330201550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (6968D2E8)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service1" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } 0: "OK" 2026-01-10T12:03:14.197557Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330201550] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } } 2026-01-10T12:03:14.197809Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-10T12:03:14.198348Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (7A38211C) asking for AccessServiceAuthorization( something.write) 2026-01-10T12:03:14.198527Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330201550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (7A38211C)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service2" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } 0: "OK" 2026-01-10T12:03:14.200575Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330201550] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } } 2026-01-10T12:03:14.200846Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (7A38211C) () has now valid token of service2@as 2026-01-10T12:03:14.201401Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8853A21F) asking for AccessServiceAuthorization( something.write) 2026-01-10T12:03:14.201559Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330201550] Request AuthorizeRequest { 
checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (8853A21F)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service3" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } 0: "OK" 2026-01-10T12:03:14.203687Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330201550] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } } 2026-01-10T12:03:14.208295Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8853A21F) () has now valid token of service3@as 2026-01-10T12:03:20.577777Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593702327745055932:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:20.577826Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:20.700178Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:20.895839Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:20.895946Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:20.899260Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:20.904470Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593702327745055897:2081] 1768046600547467 != 1768046600547470 2026-01-10T12:03:20.922725Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:21.000254Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:21.220659Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:21.220681Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:21.220691Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-10T12:03:21.220780Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:21.604403Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:22.122453Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:22.130074Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:22.139118Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-10T12:03:22.139196Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7330303e50] Connect to grpc://localhost:63990 2026-01-10T12:03:22.140269Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330303e50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-10T12:03:22.171645Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330303e50] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-10T12:03:22.171983Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:03:22.176448Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-10T12:03:22.176755Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7330303e50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-10T12:03:22.179472Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7330303e50] Response AuthorizeResponse { results { key: 0 
value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-10T12:03:22.179794Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> TKesusTest::TestQuoterHDRRParametersValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2026-01-10T12:02:41.344307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702157311924553:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:41.344581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:41.961819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:42.013176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:42.013272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:42.135076Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702157311924375:2081] 1768046561280774 != 1768046561280777 2026-01-10T12:02:42.146990Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:42.155673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:42.163752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:42.348404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:42.392911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:42.392938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:42.392947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:42.393062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:42.787277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:42.807561Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:42.807713Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc97a1f0450] Connect to grpc://localhost:6401 2026-01-10T12:02:42.811441Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:02:42.827401Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f0450] Status 14 Service Unavailable 2026-01-10T12:02:42.827630Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-10T12:02:42.827681Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:42.827735Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:42.828112Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:02:42.830028Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f0450] Status 14 Service Unavailable 2026-01-10T12:02:42.830122Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-10T12:02:42.830148Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:43.328245Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:02:43.328319Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:43.328799Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f0450] Request 
AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:02:43.341670Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f0450] Status 14 Service Unavailable 2026-01-10T12:02:43.344385Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-10T12:02:43.344454Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:44.328517Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:02:44.328560Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:44.328884Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:02:44.334279Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f0450] Status 14 Service Unavailable 2026-01-10T12:02:44.334658Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-10T12:02:44.334688Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:02:46.318909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702157311924553:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:46.319055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:47.336801Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:02:47.336852Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:47.337177Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:02:47.339341Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc97a1f0450] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:02:47.339485Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2026-01-10T12:02:47.339593Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-10T12:02:55.716763Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702218830971471:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:55.716812Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:55.751564Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:55.832757Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:55.845802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:55.845882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:55.870842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:55.994342Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:56.056750Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:56.056775Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:56.056795Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:56.056874Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:56.425273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:56.435819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:02:56.441064Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:02:56.441159Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc97a2a5650] Connect to grpc://localhost:62605 2026-01-10T12:02:56.442121Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a2a5650] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: ... 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:13.496852Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:13.537443Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:13.537529Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2026-01-10T12:03:13.537556Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2026-01-10T12:03:13.537581Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-10T12:03:13.537606Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2026-01-10T12:03:13.537672Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc97a1f31d0] Connect to grpc://localhost:13157 2026-01-10T12:03:13.538833Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f31d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:13.542804Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f31d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:13.543195Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f31d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:13.543387Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f31d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:13.543647Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f31d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:13.574663Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc97a1f31d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:13.574852Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f31d0] Status 16 Access Denied 2026-01-10T12:03:13.575059Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f31d0] Status 16 Access Denied 2026-01-10T12:03:13.575394Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-10T12:03:13.575482Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket 
**** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:13.575717Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f31d0] Status 16 Access Denied 2026-01-10T12:03:13.575959Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a1f31d0] Status 16 Access Denied 2026-01-10T12:03:13.576502Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:13.576551Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:13.577019Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-10T12:03:13.577057Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-10T12:03:13.577179Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc97a1f7ed0] Connect to grpc://localhost:6166 2026-01-10T12:03:13.578084Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a1f7ed0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-10T12:03:13.590530Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc97a1f7ed0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-10T12:03:13.591053Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-10T12:03:20.741038Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593702326489391104:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:20.768555Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:20.840215Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:21.144258Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:21.238923Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:21.252027Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:21.264394Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:21.265014Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593702326489391047:2081] 1768046600685428 != 1768046600685431 2026-01-10T12:03:21.497060Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:21.645397Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:21.770697Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:21.837451Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:21.837483Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:21.837494Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:21.837601Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:22.714467Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:22.734360Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:22.742285Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:22.742432Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-10T12:03:22.742534Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc97a32d050] Connect to grpc://localhost:18791 2026-01-10T12:03:22.743705Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a32d050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:22.753490Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a32d050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:22.770987Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc97a32d050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:03:22.771208Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a32d050] Status 14 Service Unavailable 2026-01-10T12:03:22.771338Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-10T12:03:22.771415Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2026-01-10T12:03:22.771452Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:03:22.771492Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-10T12:03:22.771607Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 
2026-01-10T12:03:22.771983Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a32d050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:22.787712Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc97a32d050] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-10T12:03:22.792157Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a32d050] Status 1 CANCELLED 2026-01-10T12:03:22.792438Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc97a32d050] Status 1 CANCELLED 2026-01-10T12:03:22.796381Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1488: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2026-01-10T12:03:22.796471Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2026-01-10T12:03:22.796510Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> VDiskBalancing::TestRandom_Block42 [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> KqpWrite::UpsertNullKey [GOOD] >> KqpWrite::ProjectReplace-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2026-01-10T12:03:30.607601Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:30.607736Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:30.648194Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:30.648365Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:30.693955Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:30.694876Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=12726871677841487698, session=0, seqNo=0) 2026-01-10T12:03:30.695050Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:03:30.710436Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=12726871677841487698, session=1) 2026-01-10T12:03:30.710787Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=5669903894259536909, session=0, seqNo=0) 2026-01-10T12:03:30.710922Z node 1 :KESUS_TABLET 
DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:03:30.725995Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=5669903894259536909, session=2) 2026-01-10T12:03:30.727251Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:03:30.727410Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:03:30.727510Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:03:30.742162Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:03:30.742526Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-10T12:03:30.742671Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-10T12:03:30.742780Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-10T12:03:30.762133Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=112) 2026-01-10T12:03:30.762611Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2026-01-10T12:03:30.762736Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-10T12:03:30.762973Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:03:30.763063Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2026-01-10T12:03:30.763135Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-10T12:03:30.763259Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-10T12:03:30.783797Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2026-01-10T12:03:30.783889Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2026-01-10T12:03:30.783935Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=223) 2026-01-10T12:03:30.784283Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=334, name="Lock2") 2026-01-10T12:03:30.784382Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 
2026-01-10T12:03:30.784457Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:03:30.798352Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=334) 2026-01-10T12:03:30.799025Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:166:2188], cookie=4678783378017875303, name="Lock1") 2026-01-10T12:03:30.799136Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:166:2188], cookie=4678783378017875303) 2026-01-10T12:03:30.799642Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:169:2191], cookie=6163913376098848416, name="Lock2") 2026-01-10T12:03:30.799708Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:169:2191], cookie=6163913376098848416) 2026-01-10T12:03:30.827406Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:30.827564Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:30.828052Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:30.836034Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:30.890258Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:30.890450Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:03:30.890502Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-10T12:03:30.890876Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:208:2221], cookie=14900715724326462842, name="Lock1") 2026-01-10T12:03:30.890982Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:208:2221], cookie=14900715724326462842) 2026-01-10T12:03:30.891529Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:216:2228], cookie=14231808411666009945, name="Lock2") 2026-01-10T12:03:30.891589Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:216:2228], cookie=14231808411666009945) 2026-01-10T12:03:32.144873Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:32.144986Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:32.227591Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:32.230513Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:32.285048Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:32.285911Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=5479515664073306969, session=0, seqNo=0) 
2026-01-10T12:03:32.286059Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:03:32.302261Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=5479515664073306969, session=1) 2026-01-10T12:03:32.302597Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=4351775191324162371, session=0, seqNo=0) 2026-01-10T12:03:32.302788Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:03:32.320284Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=4351775191324162371, session=2) 2026-01-10T12:03:32.321345Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:03:32.321492Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:03:32.321585Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:03:32.335386Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-10T12:03:32.335749Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-10T12:03:32.335906Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-10T12:03:32.335991Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-10T12:03:32.350113Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=112) 2026-01-10T12:03:32.350525Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=333, session=1, semaphore="Lock1" count=1) 2026-01-10T12:03:32.350823Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:03:32.350927Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-10T12:03:32.351076Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-10T12:03:32.369008Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=333) 2026-01-10T12:03:32.369112Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2026-01-10T12:03:32.369143Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=223) 2026-01-10T12:03:32.369813Z node 2 
:KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=14546018649366343102, name="Lock1") 2026-01-10T12:03:32.369957Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=14546018649366343102) 2026-01-10T12:03:32.370618Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], cookie=10202917872589742379, name="Lock2") 2026-01-10T12:03:32.370714Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=10202917872589742379) 2026-01-10T12:03:32.371220Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2192], cookie=4824651567996618162, name="Lock1") 2026-01-10T12:03:32.371300Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2192], cookie=4824651567996618162) 2026-01-10T12:03:32.371917Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=9655524040288159623, name="Lock2") 2026-01-10T12:03:32.371988Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=9655524040288159623) 2026-01-10T12:03:32.381535Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=444, session=2, semaphore="Lock2" count=1) 2026-01-10T12:03:32.381690Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:03:32.394498Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=444) 2026-01-10T12:03:32.395149Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:178:2200], cookie=3362470244638408356, name="Lock2") 2026-01-10T12:03:32.395238Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:178:2200], cookie=3362470244638408356) 2026-01-10T12:03:32.395682Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:181:2203], cookie=14958906161662711118, name="Lock2") 2026-01-10T12:03:32.395744Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:181:2203], cookie=14958906161662711118) 2026-01-10T12:03:32.462967Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:32.463083Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:32.463605Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:32.464252Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:32.509429Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:32.509582Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order 
#1 session 1 2026-01-10T12:03:32.509635Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-10T12:03:32.509665Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-10T12:03:32.509690Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:03:32.510007Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:220:2233], cookie=7955761079181543660, name="Lock1") 2026-01-10T12:03:32.510098Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:220:2233], cookie=7955761079181543660) 2026-01-10T12:03:32.510701Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:228:2240], cookie=16117501293556010425, name="Lock2") 2026-01-10T12:03:32.510773Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:228:2240], cookie=16117501293556010425) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 6930, MsgBus: 14131 ... waiting for SysViewsRoster update finished 2026-01-10T12:02:50.079859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:50.207833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:50.216764Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:50.217186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:50.217306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:50.689480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:50.689685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.696180Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046566427974 != 1768046566427977 2026-01-10T12:02:50.724929Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:50.797317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.938741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:51.628444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:51.628504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:51.628536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:02:51.629083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:51.641454Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:52.471748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:52.561065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:52.960098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.492300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:53.916279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:55.201999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.202215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.203339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.203600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:55.295490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:55.464271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:55.785343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.182039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.509087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.828108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:57.105639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:57.498187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:57.916533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:57.916694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:57.917236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:57.917333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:57.917408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2806:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:57.924341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:02:58.087664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2808:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:02:58.156645Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2863:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:01.918465Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3176:4457] TxId: 281474976715674. Ctx: { TraceId: 01kekwkn5r7crqq2zb4g050gw0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjhlNTUxYjEtYmEzZWM0MTktODZiMzE0YWYtNTJiYTdmZmM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# { < ... ode 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MmE4MjViMzItNjNmZmFjNjItMjJjMGU2OGEtYTU3MTE3YzM=, ActorId: [2:7593702301097635792:2573], ActorState: ReadyState, LegacyTraceId: 01kekwm3by5qy1bssy1tnwxvd9, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekwm2ymacypd6h3razqqjr4" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 17730, MsgBus: 7626 2026-01-10T12:03:17.860237Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:17.860334Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:17.960920Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:17.960996Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:17.961123Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:17.962816Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593702312754117367:2081] 1768046597585243 != 1768046597585246 2026-01-10T12:03:18.023130Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:18.120300Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:18.308033Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:18.308056Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:18.308063Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:18.308148Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:18.643815Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:19.423108Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:19.440864Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:19.458561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:19.652988Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.030537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.224448Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.909490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702338523923021:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.909568Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.909831Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702338523923031:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.909892Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.173385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.240871Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.326078Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.418270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.491869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.622442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.711097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.938278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.074238Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702347113858516:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.074363Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.074692Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702347113858521:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.074737Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702347113858522:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.074775Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.088261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:25.114715Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702347113858525:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:25.204015Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702347113858579:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:28.213286Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 29490, MsgBus: 12311 2026-01-10T12:02:53.866011Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702212357822108:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:53.866041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:02:54.449982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:02:54.585944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:54.586045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:54.695010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702212357822084:2081] 1768046573860154 != 1768046573860157 2026-01-10T12:02:54.729488Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:54.734496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:54.736350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:02:54.998317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:55.074477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:02:55.074505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:02:55.074511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-10T12:02:55.074587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:02:55.927564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:56.085201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.355365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.657517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:56.786693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:58.868308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702212357822108:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:02:58.868371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:02:59.154484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702238127627738:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.154589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.155088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702238127627748:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.155149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:59.575912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.655249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.720137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.777330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.820690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:59.889059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.006153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.152285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:00.346627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702242422595930:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.346723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.347447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702242422595936:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.347706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702242422595935:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.347740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:00.351908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:00.384047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702242422595939:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:00.453803Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702242422595990:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:03.095485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... domain.cpp:311) 2026-01-10T12:03:20.053218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.173642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.514917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.650153Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.045238Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702350182881251:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.045312Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.045735Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702350182881261:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.045781Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.163326Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.247597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.301105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.371581Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.437620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.505235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.593842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.787265Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.978882Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702350182882147:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.979002Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.979616Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702350182882152:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.979709Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702350182882153:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.979891Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:25.984170Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:25.999070Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702350182882156:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:26.061103Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702354477849515:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:28.870189Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:29.833576Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2026-01-10T12:03:29.833826Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-10T12:03:29.834002Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-10T12:03:29.834279Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7593702367362751959:2539], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7593702363067784409:2539]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7593702367362751959:2539].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-10T12:03:29.835008Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593702367362751927:2539], SessionActorId: [3:7593702363067784409:2539], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[3:7593702363067784409:2539]. 2026-01-10T12:03:29.835377Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=N2U4Y2NmZTQtYTBjZmZjOGYtZmQ1ZjdhMzktODAzNzg5OQ==, ActorId: [3:7593702363067784409:2539], ActorState: ExecuteState, LegacyTraceId: 01kekwmh7g226bjp2681a1xj85, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593702367362751954:2539] from: [3:7593702367362751927:2539] trace_id# 2026-01-10T12:03:29.835551Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593702367362751954:2539] TxId: 281474976710677. Ctx: { TraceId: 01kekwmh7g226bjp2681a1xj85, Database: /Root, SessionId: ydb://session/3?node_id=3&id=N2U4Y2NmZTQtYTBjZmZjOGYtZmQ1ZjdhMzktODAzNzg5OQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-10T12:03:29.836027Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=N2U4Y2NmZTQtYTBjZmZjOGYtZmQ1ZjdhMzktODAzNzg5OQ==, ActorId: [3:7593702363067784409:2539], ActorState: ExecuteState, LegacyTraceId: 01kekwmh7g226bjp2681a1xj85, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TPopulatorTestWithResets::UpdateAck >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CustomDefaultPresets >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-10T12:03:36.533305Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-10T12:03:36.562017Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-10T12:03:36.562138Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-10T12:03:36.573015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-10T12:03:36.573118Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-10T12:03:36.573190Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-01-10T12:03:36.573294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-10T12:03:36.573320Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-10T12:03:36.573343Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-10T12:03:36.573421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-10T12:03:36.573447Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-10T12:03:36.573468Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-10T12:03:36.573552Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-10T12:03:36.573619Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-10T12:03:36.573648Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-10T12:03:36.573686Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-10T12:03:36.573738Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-10T12:03:36.573763Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-10T12:03:36.573782Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-10T12:03:36.573840Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-10T12:03:36.573864Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-10T12:03:36.573882Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-10T12:03:36.573957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-10T12:03:36.574043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.574211Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-10T12:03:36.574314Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.574403Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.574544Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-10T12:03:36.574650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-10T12:03:36.574727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.574798Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-10T12:03:36.574865Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-10T12:03:36.574933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.575029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-10T12:03:36.575102Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.575170Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:03:36.575231Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.575288Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-10T12:03:36.575398Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-10T12:03:36.575445Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.575506Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-10T12:03:36.575560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:03:36.575641Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.575709Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-10T12:03:36.575759Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:03:36.575800Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.575880Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:03:36.575969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-10T12:03:36.576044Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-10T12:03:36.592727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:03:36.592923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-10T12:03:36.592990Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-10T12:03:36.593057Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:03:36.593178Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2026-01-10T12:03:36.593270Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-10T12:03:36.593331Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-10T12:03:36.593373Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-10T12:03:36.593431Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:03:36.593536Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2026-01-10T12:03:36.593589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-10T12:03:36.593628Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:03:36.593694Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-10T12:03:36.593750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:03:36.593806Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:03:36.593844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:03:36.593884Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] 2026-01-10T12:03:36.593942Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-10T12:03:36.593974Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-01-10T12:03:36.594015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:03:36.594082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-10T12:03:36.594107Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:03:36.594193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:03:36.594246Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:03:36.594303Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:03:36.594335Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-10T12:03:36.594370Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:03:36.594429Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-10T12:03:36.594471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-10T12:03:36.594537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:03:36.594600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-10T12:03:36.594655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-10T12:03:36.594861Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-10T12:03:36.608367Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-10T12:03:36.608462Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TMLPWriterTests::WriteOneMessage [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |83.3%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> TMLPWriterTests::WriteTwoMessage_OnePartition >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2026-01-10T12:03:35.544038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:35.544155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ulator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.040273Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.040363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.040450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.040542Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.040645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.040704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.040762Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.040840Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.040891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.040941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.040982Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041080Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.041131Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.041238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041303Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041417Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.041464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041623Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.041674Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: 
/Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041761Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.041802Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.041905Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.041991Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.042045Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-10T12:03:37.042127Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.042190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-10T12:03:37.042263Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.042308Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-10T12:03:37.042368Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-10T12:03:37.042415Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-10T12:03:37.042507Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.042573Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:97:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2026-01-10T12:03:37.042635Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-10T12:03:37.042721Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-10T12:03:37.042789Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-10T12:03:37.042854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-10T12:03:37.042892Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2026-01-10T12:03:37.042955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-10T12:03:37.043015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] TestWaitNotification: OK eventTxId 100 |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TModifyUserTest::ModifyUser >> TOlap::CustomDefaultPresets [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 >> TPopulatorTest::RemoveDir [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] 
Test command err: RandomSeed# 16394245769804044180 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-01-10T11:55:32.342568Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2026-01-10T11:55:32.583921Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2026-01-10T11:55:38.800501Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2026-01-10T11:55:38.800768Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2026-01-10T11:55:38.800981Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7677:16] ServerId# [1:7685:1099] TabletId# 72057594037932033 PipeClientId# [5:7677:16] 2026-01-10T11:55:38.801138Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2026-01-10T11:55:38.801285Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2026-01-10T11:55:38.801405Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with 
key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Statu ... 936 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 948 SEND TEvPut with key 
[1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Stop node 3 2026-01-10T12:02:30.768325Z 1 00h25m30.749429s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 4 2026-01-10T12:02:31.377388Z 1 00h25m40.749710s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# 
{ } ApproximateFreeSpaceShare# 0.999707} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 7 2026-01-10T12:02:34.283979Z 1 00h26m10.753072s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 1 2026-01-10T12:02:34.958892Z 1 00h26m20.753584s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 1 2026-01-10T12:02:36.391519Z 1 00h26m40.754608s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { 
} ApproximateFreeSpaceShare# 0.999658} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Starting nodes Start compaction 1 Start checking |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:30.497558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:30.497675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:30.497742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:30.497792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:30.497839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:30.497883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:30.497972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:30.498089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:30.499110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:30.499451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:30.619213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:30.619294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:30.638781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:30.638916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:30.639147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:30.648508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:30.648820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:30.649491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:30.649733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:30.651783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:30.651971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:30.653513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:30.653593Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:30.653668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:30.653722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:30.653781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:30.654081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:30.796178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } 
TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.797961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.798495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... MESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-10T12:03:39.154176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:39.154237Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:03:39.154311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-10T12:03:39.155305Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:03:39.155431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:03:39.155501Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:03:39.155560Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:03:39.155611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:03:39.156610Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:03:39.156697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:03:39.156729Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:03:39.156762Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:03:39.156798Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:03:39.156878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:03:39.168618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-10T12:03:39.168726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-10T12:03:39.168821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-10T12:03:39.169334Z node 3 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-10T12:03:39.169446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-10T12:03:39.169568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-10T12:03:39.170565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:03:39.181305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:03:39.192847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:39.213060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-10T12:03:39.213137Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:03:39.213281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:03:39.220918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:39.221116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:39.221167Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone 
opId# 102:0 ProgressState 2026-01-10T12:03:39.221316Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:03:39.221360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:39.221405Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:03:39.221444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:39.221487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:03:39.221571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:744:2721] message: TxId: 102 2026-01-10T12:03:39.221629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:39.221695Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:03:39.221736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:03:39.221903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:03:39.225028Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:03:39.225094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:808:2777] TestWaitNotification: OK eventTxId 102 2026-01-10T12:03:39.225718Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:03:39.226059Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 372us result status StatusSuccess 2026-01-10T12:03:39.226667Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2026-01-10T12:03:39.361848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:39.361914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 46678944, LocalPathId: 1] Version: 8 }: sender# [1:18:2065], cookie# 101 2026-01-10T12:03:40.023600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:97:2125], cookie# 101 2026-01-10T12:03:40.023645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 101 2026-01-10T12:03:40.023737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 101 2026-01-10T12:03:40.023797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 101 2026-01-10T12:03:40.023990Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:98:2126], cookie# 101 2026-01-10T12:03:40.024052Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 8 2026-01-10T12:03:40.024411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:99:2127], cookie# 101 2026-01-10T12:03:40.024594Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 101 2026-01-10T12:03:40.027470Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] 
Version: 3 }: sender# [1:98:2126], cookie# 101 2026-01-10T12:03:40.027536Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-10T12:03:40.027678Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 101 2026-01-10T12:03:40.027710Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-10T12:03:40.030542Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 447, preserialized size# 65 2026-01-10T12:03:40.030612Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 9 2026-01-10T12:03:40.030766Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-10T12:03:40.030816Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-10T12:03:40.030862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:03:40.031071Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000038, drop txId: 101" Path: "/Root/DirB" PathId: 38 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 307, preserialized size# 0 2026-01-10T12:03:40.031106Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 101, is deletion# true, version: 0 2026-01-10T12:03:40.031184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-10T12:03:40.031234Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-10T12:03:40.031279Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-10T12:03:40.031470Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:12:2059], cookie# 101 2026-01-10T12:03:40.031523Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:15:2062], cookie# 101 2026-01-10T12:03:40.031565Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:18:2065], cookie# 101 2026-01-10T12:03:40.031767Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:97:2125], cookie# 101 2026-01-10T12:03:40.031823Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2026-01-10T12:03:40.031867Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: 
sender# [1:15:2062], cookie# 101 2026-01-10T12:03:40.031901Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2026-01-10T12:03:40.031961Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:98:2126], cookie# 101 2026-01-10T12:03:40.031988Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 9 2026-01-10T12:03:40.032707Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:99:2127], cookie# 101 2026-01-10T12:03:40.032886Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:97:2125], cookie# 101 2026-01-10T12:03:40.033069Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:98:2126], cookie# 101 2026-01-10T12:03:40.033112Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 18446744073709551615 2026-01-10T12:03:40.033821Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:99:2127], cookie# 101 2026-01-10T12:03:40.033859Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |83.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> TSubDomainTest::LsLs |83.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |83.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> DescribeSchemaSecretsService::SchemeCacheRetryErrors [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> TSubDomainTest::UserAttributes >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8453, MsgBus: 21233 2026-01-10T12:03:04.449553Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702256495263900:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:04.449767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:04.916245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:04.935485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:04.935572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:05.035473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:05.083640Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:05.084883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7593702256495263709:2081] 1768046584408040 != 1768046584408043 2026-01-10T12:03:05.099519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:05.296671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:05.296693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:05.296700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:05.296775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:05.404547Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:05.883472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:05.892888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:05.979850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:06.172103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:06.492828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:06.667817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:09.463295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702256495263900:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:09.463393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:09.500595Z 
node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702277970102065:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:09.500703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:09.501160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702277970102074:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:09.501198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:09.925302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.010296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.045275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.092069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.149428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.211508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.309394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.383894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:10.529485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702282265070249:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.529583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.530227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702282265070254:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.530271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702282265070255:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.530386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:10.534798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:10.555297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-10T12:03:10.556157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702282265070258:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:10.657744Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702282265070309:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChan ... #72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:29.418437Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:29.662786Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:29.662832Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:29.662839Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:29.662926Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:29.844210Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:29.892255Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:30.914805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:30.925536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:30.943295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:31.142154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:31.856944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:32.009808Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.537318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702401984708420:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:37.537449Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:37.537998Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702401984708430:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:37.538067Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:37.663606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.741770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.836784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.907186Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.969115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.109557Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.197835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.293738Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.536600Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702406279676607:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.536718Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.537291Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702406279676612:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.537337Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702406279676613:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.537379Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.547651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:38.583168Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702406279676616:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:38.650530Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702406279676667:3784] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:40.973197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:41.274669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:03:41.327650Z node 3 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037927 cannot parse tx 281474976715675: Table '/Root/TestTable' scheme changed. 2026-01-10T12:03:41.328170Z node 3 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [3:7593702419164578957:2528] TxId: 281474976715675. Ctx: { TraceId: 01kekwmwm6atvasg1gftta3wag, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzRiNTdhODMtN2EwN2U5ODctYzU2ZWMwOS05OTI0ZWZjYg==, PoolId: default, IsStreamingQuery: 0}. ERROR: [SCHEME_CHANGED] Table '/Root/TestTable' scheme changed.; trace_id# 2026-01-10T12:03:41.328568Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NzRiNTdhODMtN2EwN2U5ODctYzU2ZWMwOS05OTI0ZWZjYg==, ActorId: [3:7593702414869611524:2528], ActorState: ExecuteState, LegacyTraceId: 01kekwmwm6atvasg1gftta3wag, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/TestTable\' scheme changed." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> KqpWrite::ProjectReplace-UseSink [GOOD] >> KqpStats::SysViewClientLost >> KqpQuery::SelectWhereInSubquery >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> TSubDomainTest::StartAndStopTenanNode >> KqpExplain::SortStage >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4418, MsgBus: 31220 2026-01-10T12:03:18.226386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702316918586258:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:18.248213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:18.277340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:18.862915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:18.862994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:18.864141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:18.876266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:19.087651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:19.099686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702316918586156:2081] 1768046598197950 != 1768046598197953 2026-01-10T12:03:19.132428Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:19.268025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:19.396802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:19.396822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:19.396828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:19.396934Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:20.588107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:20.605031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:20.759793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:21.164707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:21.795118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.059748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.221560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702316918586258:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:23.224737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:26.941834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593702351278326437:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.941939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.942783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702351278326447:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.942832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.727729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.830008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.898626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.950711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.132595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.218459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.296004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.410830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.514684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702359868261936:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.514781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.515240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702359868261941:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.515292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702359868261942:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.515421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.522457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:28.595904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702359868261945:2499], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:28.689756Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702359868261997:3801] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20484, MsgBus: 20030 2026-01-10T12:03:34.096221Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:34.096488Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:34.367707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:34.370176Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:34.375499Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:34.375859Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:34.380136Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702383397974238:2081] 1768046613931215 != 1768046613931218 2026-01-10T12:03:34.393444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:34.688254Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:34.688278Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:34.688283Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:34.688359Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:34.758786Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:35.044270Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:36.500703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:36.548183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:36.718252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.070885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:37.189267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.870262Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702409167779892:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.870358Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.870879Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702409167779902:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.870931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.954571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.008979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.051147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.092880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.142970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.196409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.251853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.336816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:40.439356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702413462748072:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:40.439447Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:40.439562Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702413462748077:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:40.439689Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702413462748079:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:40.439746Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:40.443534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:40.464506Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702413462748081:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:40.551805Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702413462748132:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15966, MsgBus: 4283 2026-01-10T12:03:20.166346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702327320917593:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:20.166389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:20.821127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:20.860241Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702327320917463:2081] 1768046600139931 != 1768046600139934 2026-01-10T12:03:20.874564Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:20.879922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:20.880020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:20.905852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:21.056590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:21.215952Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:21.297643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:21.297663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:21.297695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:21.297766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:22.473542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:22.558686Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.052357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.438602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.615547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.168336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702327320917593:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:25.181309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:26.436053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702353090723129:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.436195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.440572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702353090723139:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.440671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:26.833933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:26.881984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:26.925721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:26.967996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.046193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.113835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.205727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.312829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:27.473418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702357385691305:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.473503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.473889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702357385691311:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.474186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702357385691310:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.474219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.479415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:27.496388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702357385691314:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:27.564310Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702357385691365:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:30.038651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 26900, MsgBus: 3457 2026-01-10T12:03:33.042275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:33.112231Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:33.248282Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:33.351758Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702378297969449:2081] 1768046612964441 != 1768046612964444 2026-01-10T12:03:33.360768Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:33.401389Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:33.401466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:33.449658Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:33.466233Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:33.728679Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:33.728701Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:33.728707Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:33.728781Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:34.009555Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:34.678165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:34.712551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:34.955401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:35.253129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:35.437681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.639992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702404067775101:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.640114Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.640763Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702404067775111:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.640826Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:38.718352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.786264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.830651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.872437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.913732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:38.969668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.022086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.116326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.284503Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702408362743283:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.284596Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.285033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702408362743288:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.285074Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593702408362743289:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.285340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:39.289341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:39.312533Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593702408362743292:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:39.372942Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702408362743343:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:42.266628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpExplain::LimitOffset |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] >> DataShardStats::BackupTableStatsReportInterval >> TOlapNaming::AlterColumnTableOk [GOOD] >> KqpQuery::UdfTerminate >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn >> test_workload.py::TestYdbMixedWorkload::test[row] [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:30.370229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:30.370352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:30.370413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:30.370456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:30.370496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:30.370528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:30.370578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:30.370680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:30.371605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:30.371935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:30.539261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:30.539329Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:30.555429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:30.555546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:30.555757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:30.565224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:30.565457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:30.566202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:30.566454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:30.568900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:30.569109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:30.570347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:30.570411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:30.570494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:30.570535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:30.570574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:30.570863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:30.819037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.827978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:30.828613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-10T12:03:46.360319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.360419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.367340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.367490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.368435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.369029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.370785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.372792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-10T12:03:46.379779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.379990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.380718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.381559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.381665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.381739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.381868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.381932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.382023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.382154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.382240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.386948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-10T12:03:46.387341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:03:46.387646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:03:46.387762Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:03:46.387799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:46.387856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:03:46.387894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:46.387933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:03:46.388010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:3123:4342] message: TxId: 102 2026-01-10T12:03:46.388062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:03:46.388157Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:03:46.388199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:03:46.389336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-10T12:03:46.394490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:03:46.394574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:4029:5187] TestWaitNotification: OK eventTxId 102 |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> 
DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> TSubDomainTest::LsAltered [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2026-01-10T12:03:43.114921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702427023482327:2100];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:43.121748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:43.463367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:43.463479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:43.519059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:43.522398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:43.556204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702427023482253:2081] 1768046623106921 != 1768046623106924 2026-01-10T12:03:43.560715Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:43.682174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:03:43.880308Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702427023482515:2105] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:43.880391Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702427023482819:2267] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:43.880531Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702427023482522:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:43.880638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702427023482688:2199][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702427023482522:2108], cookie# 1 2026-01-10T12:03:43.883521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702427023482720:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7593702427023482717:2199], cookie# 1 2026-01-10T12:03:43.883593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702427023482721:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702427023482718:2199], cookie# 1 2026-01-10T12:03:43.883610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702427023482722:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702427023482719:2199], cookie# 1 2026-01-10T12:03:43.883664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702427023482221:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702427023482720:2199], cookie# 1 2026-01-10T12:03:43.883702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702427023482224:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702427023482721:2199], cookie# 1 2026-01-10T12:03:43.883721Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702427023482227:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702427023482722:2199], cookie# 1 2026-01-10T12:03:43.883767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702427023482720:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482221:2049], cookie# 1 2026-01-10T12:03:43.883801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702427023482721:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482224:2052], cookie# 1 2026-01-10T12:03:43.883818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702427023482722:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482227:2055], cookie# 1 2026-01-10T12:03:43.883859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702427023482688:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482717:2199], cookie# 1 2026-01-10T12:03:43.883896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702427023482688:2199][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:43.883952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702427023482688:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482718:2199], cookie# 1 2026-01-10T12:03:43.883978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702427023482688:2199][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:43.884005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702427023482688:2199][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702427023482719:2199], cookie# 1 2026-01-10T12:03:43.884018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702427023482688:2199][/dc-1] Sync cookie mismatch: sender# [1:7593702427023482719:2199], cookie# 1, current cookie# 0 2026-01-10T12:03:43.884101Z 
node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702427023482522:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:43.901332Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702427023482522:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702427023482688:2199] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:43.901460Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702427023482522:2108], cacheItem# { Subscriber: { Subscriber: [1:7593702427023482688:2199] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:43.906121Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702427023482820:2268], recipient# [1:7593702427023482819:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:43.906459Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702427023482819:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:43.948772Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593702427023482819:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:43.954601Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593702427023482819:2267] Handle TEvDescribeSchemeResult Forward to# [1:7593702427023482818:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:43.960542Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593702427023482515:2105] Handle TEvProposeTransaction 2026-01-10T12:03:43.960569Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593702427023482515:2105] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:03:43.960629Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593702427023482515:2105] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593702427023482826:2273] 2026-01-10T12:03:44.061429Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593702427023482826:2273] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" ... 
RD_POPULATOR DEBUG: populator.cpp:758: [2:7593702444859692285:2251] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7593702444859692290:2256], cookie# 281474976710662 2026-01-10T12:03:47.843238Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [2:7593702444859692285:2251] Ack for unknown update (already acked?): sender# [2:7593702444859692290:2256], cookie# 281474976710662 2026-01-10T12:03:47.843350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710662 2026-01-10T12:03:47.843429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710662 2026-01-10T12:03:47.843449Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710662 2026-01-10T12:03:47.843488Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2026-01-10T12:03:47.843504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-01-10T12:03:47.843557Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710662, subscribers: 0 2026-01-10T12:03:47.844038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 2026-01-10T12:03:47.845188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-10T12:03:47.845846Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7593702444859691953:2102] Handle TEvNavigate describe path /dc-1/USER_0 2026-01-10T12:03:47.845872Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7593702444859692438:2362] HANDLE EvNavigateScheme /dc-1/USER_0 2026-01-10T12:03:47.845969Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593702444859692025:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:47.846034Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7593702444859692369:2309][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7593702444859692025:2118], cookie# 10 2026-01-10T12:03:47.846127Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593702444859692373:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: 
sender# [2:7593702444859692370:2309], cookie# 10 2026-01-10T12:03:47.846141Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593702444859692374:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7593702444859692371:2309], cookie# 10 2026-01-10T12:03:47.846177Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593702444859692375:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7593702444859692372:2309], cookie# 10 2026-01-10T12:03:47.846206Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593702444859691722:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7593702444859692373:2309], cookie# 10 2026-01-10T12:03:47.846230Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593702444859691725:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7593702444859692374:2309], cookie# 10 2026-01-10T12:03:47.846247Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593702444859691728:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7593702444859692375:2309], cookie# 10 2026-01-10T12:03:47.846281Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593702444859692373:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859691722:2049], cookie# 10 2026-01-10T12:03:47.846330Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593702444859692374:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859691725:2052], cookie# 10 2026-01-10T12:03:47.846353Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593702444859692375:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859691728:2055], cookie# 10 2026-01-10T12:03:47.846387Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593702444859692369:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859692370:2309], cookie# 10 2026-01-10T12:03:47.846405Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7593702444859692369:2309][/dc-1/USER_0] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:47.846428Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593702444859692369:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859692371:2309], cookie# 10 2026-01-10T12:03:47.846445Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7593702444859692369:2309][/dc-1/USER_0] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:47.846469Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593702444859692369:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7593702444859692372:2309], cookie# 10 2026-01-10T12:03:47.846480Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: 
[main][2:7593702444859692369:2309][/dc-1/USER_0] Sync cookie mismatch: sender# [2:7593702444859692372:2309], cookie# 10, current cookie# 0 2026-01-10T12:03:47.846512Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7593702444859692025:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-10T12:03:47.846569Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7593702444859692025:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7593702444859692369:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1768046627781 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:47.846627Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593702444859692025:2118], cacheItem# { Subscriber: { Subscriber: [2:7593702444859692369:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1768046627781 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-10T12:03:47.846793Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593702444859692439:2363], recipient# [2:7593702444859692438:2362], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:47.846826Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7593702444859692438:2362] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:47.846883Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7593702444859692438:2362] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-01-10T12:03:47.847378Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7593702444859692438:2362] Handle TEvDescribeSchemeResult Forward to# [2:7593702444859692437:2361] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1768046627781 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2026-01-10T12:03:42.514020Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702420921234375:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:42.514157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:42.530852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:42.950171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:42.957986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:42.958092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:42.969140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:43.043743Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:43.045956Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702420921234226:2081] 1768046622454199 != 1768046622454202 2026-01-10T12:03:43.117678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:03:43.262673Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702420921234474:2104] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:43.262734Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702425216202083:2267] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:43.262848Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702420921234499:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:43.262944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702420921234679:2209][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702420921234499:2118], cookie# 1 2026-01-10T12:03:43.272480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702420921234708:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234705:2209], cookie# 1 2026-01-10T12:03:43.272567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702420921234709:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234706:2209], cookie# 1 2026-01-10T12:03:43.272605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702420921234710:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234707:2209], cookie# 1 2026-01-10T12:03:43.272654Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702420921234200:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234710:2209], cookie# 1 2026-01-10T12:03:43.272722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702420921234710:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234200:2055], cookie# 1 2026-01-10T12:03:43.272774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702420921234679:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234707:2209], cookie# 1 2026-01-10T12:03:43.272829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702420921234679:2209][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:43.272867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702420921234194:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234708:2209], cookie# 1 2026-01-10T12:03:43.272886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:7593702420921234197:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702420921234709:2209], cookie# 1 2026-01-10T12:03:43.272909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702420921234708:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234194:2049], cookie# 1 2026-01-10T12:03:43.272925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702420921234709:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234197:2052], cookie# 1 2026-01-10T12:03:43.272946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702420921234679:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234705:2209], cookie# 1 2026-01-10T12:03:43.272975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702420921234679:2209][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:43.273016Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702420921234679:2209][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702420921234706:2209], cookie# 1 2026-01-10T12:03:43.273031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702420921234679:2209][/dc-1] Sync cookie mismatch: sender# [1:7593702420921234706:2209], cookie# 1, current cookie# 0 2026-01-10T12:03:43.273099Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702420921234499:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:43.284230Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702420921234499:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702420921234679:2209] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:43.284372Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702420921234499:2118], cacheItem# { Subscriber: { Subscriber: [1:7593702420921234679:2209] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:43.287204Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702425216202084:2268], recipient# [1:7593702425216202083:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:43.292243Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702425216202083:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:43.342824Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593702425216202083:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:43.346952Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593702425216202083:2267] Handle TEvDescribeSchemeResult Forward to# [1:7593702425216202082:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:43.353738Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593702420921234474:2104] Handle TEvProposeTransaction 2026-01-10T12:03:43.353790Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593702420921234474:2104] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:03:43.353848Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593702420921234474:2104] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593702425216202089:2272] 2026-01-10T12:03:43.517724Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected eve ... 
ion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-10T12:03:47.817302Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593702443114569969:2337], recipient# [2:7593702443114569968:2336], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:47.817333Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7593702443114569968:2336] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:47.817412Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7593702443114569968:2336] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:47.818104Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7593702443114569968:2336] Handle TEvDescribeSchemeResult Forward to# [2:7593702443114569967:2335] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1768046627305 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:47.880223Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593702438819602270:2117], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:47.880303Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [2:7593702438819602270:2117], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-10T12:03:47.880503Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:03:47.880927Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7593702438819601968:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7593702443114569975:2338] 2026-01-10T12:03:47.880938Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7593702438819601968:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:47.880990Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7593702438819601968:2049] Subscribe: subscriber# [2:7593702443114569975:2338], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:03:47.881023Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7593702438819601971:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7593702443114569976:2338] 2026-01-10T12:03:47.881030Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7593702438819601971:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:47.881049Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7593702438819601971:2052] Subscribe: subscriber# [2:7593702443114569976:2338], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:03:47.881069Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7593702438819601974:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7593702443114569977:2338] 2026-01-10T12:03:47.881076Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7593702438819601974:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:47.881093Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7593702438819601974:2055] Subscribe: subscriber# [2:7593702443114569977:2338], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:03:47.881124Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7593702443114569975:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702438819601968:2049] 2026-01-10T12:03:47.881142Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7593702443114569976:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702438819601971:2052] 
2026-01-10T12:03:47.881177Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7593702443114569977:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702438819601974:2055] 2026-01-10T12:03:47.881218Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702443114569972:2338] 2026-01-10T12:03:47.881271Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702443114569973:2338] 2026-01-10T12:03:47.881301Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Set up state: owner# [2:7593702438819602270:2117], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:47.881326Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7593702443114569974:2338] 2026-01-10T12:03:47.881349Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593702443114569971:2338][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7593702438819602270:2117], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:47.881374Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7593702438819601968:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7593702443114569975:2338] 2026-01-10T12:03:47.881389Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7593702438819601971:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7593702443114569976:2338] 2026-01-10T12:03:47.881401Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7593702438819601974:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7593702443114569977:2338] 2026-01-10T12:03:47.881451Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7593702438819602270:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-10T12:03:47.881508Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7593702438819602270:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7593702443114569971:2338] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:47.881602Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593702438819602270:2117], cacheItem# { Subscriber: { Subscriber: 
[2:7593702443114569971:2338] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:47.881675Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593702443114569978:2339], recipient# [2:7593702443114569970:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:47.903594Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2026-01-10T12:03:33.739007Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:33.739139Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:33.813524Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:33.813722Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:33.864865Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:33.880285Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=14634145757633286815, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2026-01-10T12:03:33.880522Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=14634145757633286815) 2026-01-10T12:03:33.881158Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2166], cookie=6631820496150192870, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2026-01-10T12:03:33.881295Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2166], cookie=6631820496150192870) 2026-01-10T12:03:33.881762Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=17358992371262517700, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2026-01-10T12:03:33.881987Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2026-01-10T12:03:33.901367Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=17358992371262517700) 2026-01-10T12:03:33.901957Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] 
TTxQuoterResourceAdd::Execute (sender=[1:152:2174], cookie=17763849680604518082, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2026-01-10T12:03:33.902189Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2026-01-10T12:03:33.918000Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2174], cookie=17763849680604518082) 2026-01-10T12:03:34.897235Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:34.897350Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:34.949258Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:34.953041Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:34.998994Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:34.999509Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:136:2161], cookie=249485242888107258, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-10T12:03:34.999899Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:03:35.016155Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:136:2161], cookie=249485242888107258) 2026-01-10T12:03:35.016775Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:147:2169], cookie=16715947584126967064, path="/Root/Res", config={ }) 2026-01-10T12:03:35.017017Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-10T12:03:35.041054Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:147:2169], cookie=16715947584126967064) 2026-01-10T12:03:35.043112Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:152:2174]. Cookie: 15730093812024558327. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:35.043205Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:152:2174], cookie=15730093812024558327) 2026-01-10T12:03:35.043797Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:152:2174]. Cookie: 2754420276179207926. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2026-01-10T12:03:35.043855Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:152:2174], cookie=2754420276179207926) 2026-01-10T12:03:38.122351Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:38.122493Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:38.157875Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:38.158014Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:38.197420Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:38.197895Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=4937046218917346821, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:03:38.198239Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:03:38.211833Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=4937046218917346821) 2026-01-10T12:03:38.212449Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=7542679962527517115, path="/Root/Res", config={ }) 2026-01-10T12:03:38.212725Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-10T12:03:38.226326Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=7542679962527517115) 2026-01-10T12:03:38.227186Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2174]. Cookie: 17881481775710343482. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:38.227244Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2174], cookie=17881481775710343482) 2026-01-10T12:03:38.227736Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2174]. Cookie: 752407847794750626. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2026-01-10T12:03:38.227782Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:152:2174], cookie=752407847794750626) 2026-01-10T12:03:41.085477Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:41.085600Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:41.137170Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:41.137303Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:41.154367Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:41.154921Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=4094964286400678215, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:03:41.155262Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:03:41.182015Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=4094964286400678215) 2026-01-10T12:03:41.182952Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 895375023347560028. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:41.183025Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=895375023347560028) 2026-01-10T12:03:41.183512Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 8844344715591298168. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-10T12:03:41.183589Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=8844344715591298168) 2026-01-10T12:03:41.184060Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 10027972762944400757. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-10T12:03:41.184139Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=10027972762944400757) 2026-01-10T12:03:44.307348Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:03:44.307480Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:03:44.357409Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:03:44.357571Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:03:44.398149Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:03:44.398683Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=10289433018711297981, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:03:44.399007Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:03:44.412545Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=10289433018711297981) 2026-01-10T12:03:44.413364Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2169]. Cookie: 4883706533170080461. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:44.413420Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2169], cookie=4883706533170080461) 2026-01-10T12:03:44.413922Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:147:2169]. Cookie: 8629019918579069036. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2026-01-10T12:03:44.413975Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:147:2169], cookie=8629019918579069036) 2026-01-10T12:03:47.306815Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:186:2193]. Cookie: 10103531662459630715. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:47.306892Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:186:2193], cookie=10103531662459630715) 2026-01-10T12:03:47.307376Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:186:2193]. Cookie: 2502383696760064797. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2026-01-10T12:03:47.307420Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:186:2193], cookie=2502383696760064797) 2026-01-10T12:03:49.738002Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:219:2219]. Cookie: 4593665621481185023. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:03:49.738066Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:219:2219], cookie=4593665621481185023) 2026-01-10T12:03:49.738514Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:219:2219]. Cookie: 3331647283813490079. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2026-01-10T12:03:49.738551Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:219:2219], cookie=3331647283813490079) >> TSchemeShardTest::RmDirTwice |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4310, MsgBus: 15118 2026-01-10T12:03:17.417226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702313765939852:2240];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:17.417295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:18.177344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:18.207455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:18.207570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:18.296582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:18.514669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:18.525758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:18.536448Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702313765939633:2081] 1768046597337221 != 1768046597337224 2026-01-10T12:03:18.604349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:18.912310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:18.912336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:18.912344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:18.912449Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:20.009257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:20.033873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:20.147039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.423966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.636803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:20.744624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:22.416551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702313765939852:2240];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:22.416614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:23.346549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702339535745302:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.346684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.347206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702339535745312:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.347250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:23.937626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.000473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.046309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.088684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.129973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.183186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.231627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.313648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:24.386637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702343830713481:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.386722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.389037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702343830713486:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.389123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702343830713487:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.389292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:24.393991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:24.414088Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702343830713490:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:24.484952Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702343830713543:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 63711, MsgBus: 32454 2026-01-10T12:03:28.481105Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActo ... cpp:1374: Notification cookie mismatch for subscription [3:7593702406321805191:2081] 1768046618422875 != 1768046618422878 2026-01-10T12:03:38.567119Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:38.567228Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:38.567954Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:38.572217Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:38.589364Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:38.692410Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:38.692435Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:38.692442Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:38.692520Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:38.794609Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:39.241452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:39.246867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:39.255329Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.334112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.509025Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:39.604367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:39.711943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:43.433155Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593702406321805337:2184];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:43.433250Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:43.979021Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702427796643546:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:43.979117Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:43.979619Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702427796643556:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:43.979675Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.082684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.144612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.218144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.297163Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.390513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.465301Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.555100Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.633811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:44.804188Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702432091611734:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.804347Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.804839Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702432091611739:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.804892Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702432091611740:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.804933Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:44.809973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:44.850518Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702432091611743:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:44.930812Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702432091611794:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:47.921551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:48.046809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> TSchemeShardTest::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2026-01-10T12:03:39.360350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702408812368076:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:39.360538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:39.878259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:39.878391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:39.888150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:39.979656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:40.008374Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702408812367956:2081] 1768046619276970 != 1768046619276973 2026-01-10T12:03:40.032593Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:40.254265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-10T12:03:40.334487Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702408812368219:2106] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:40.334617Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702413107335824:2269] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:40.334727Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702408812368242:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:40.334836Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702408812368412:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702408812368242:2119], cookie# 1 2026-01-10T12:03:40.336608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702408812368452:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368449:2206], cookie# 1 2026-01-10T12:03:40.336667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702408812368453:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368450:2206], cookie# 1 2026-01-10T12:03:40.336695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702408812368454:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368451:2206], cookie# 1 2026-01-10T12:03:40.336739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702408812367924:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368452:2206], cookie# 1 2026-01-10T12:03:40.336768Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702408812367927:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368453:2206], cookie# 1 2026-01-10T12:03:40.336784Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702408812367930:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702408812368454:2206], cookie# 1 2026-01-10T12:03:40.336889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702408812368452:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812367924:2049], cookie# 1 2026-01-10T12:03:40.336911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702408812368453:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812367927:2052], cookie# 1 2026-01-10T12:03:40.336927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702408812368454:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812367930:2055], cookie# 1 2026-01-10T12:03:40.336973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702408812368412:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812368449:2206], cookie# 1 2026-01-10T12:03:40.337003Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702408812368412:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:40.337021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702408812368412:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812368450:2206], cookie# 1 2026-01-10T12:03:40.337043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702408812368412:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:40.337095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702408812368412:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702408812368451:2206], cookie# 1 2026-01-10T12:03:40.337120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702408812368412:2206][/dc-1] Sync cookie mismatch: sender# [1:7593702408812368451:2206], cookie# 1, current cookie# 0 2026-01-10T12:03:40.337193Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702408812368242:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:40.343817Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702408812368242:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702408812368412:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:40.352363Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702408812368242:2119], cacheItem# { Subscriber: { Subscriber: [1:7593702408812368412:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:40.355386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702413107335825:2270], recipient# [1:7593702413107335824:2269], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] 
Groups: [] } }] } 2026-01-10T12:03:40.355474Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702413107335824:2269] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:40.364399Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702408812368242:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:40.364493Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593702408812368242:2119], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-10T12:03:40.364732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593702413107335827:2271][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:03:40.365156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593702408812367924:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593702413107335831:2271] 2026-01-10T12:03:40.365169Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593702408812367924:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:40.365250Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593702408812367924:2049] Subscribe: subscriber# [1:7593702413107335831:2271], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:03:40.365291Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593702408812367927:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593702413107335832:2271] 2026-01-10T12:03:40.365299Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593702408812367927:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:40.365323Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593702408812367927:2052] Subscribe: subscriber# [1:7593702413107335832:2271], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:03:40.365354Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593702408812367930:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593702413107335833:2271] 2026-01-10T12:03:40.365364Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593702408812367930:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:03:40.36538 ... 
ype: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768046630896 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2026-01-10T12:03:51.399921Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593702460950651051:2342], recipient# [3:7593702460950651050:2341], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-10T12:03:51.399958Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7593702460950651050:2341] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:51.400000Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7593702460950651050:2341] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-10T12:03:51.400104Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702460950651050:2341] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-10T12:03:51.400134Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7593702460950651050:2341] txid# 281474976710662 SEND to# [3:7593702460950651049:2340] Source {TEvProposeTransactionStatus Status# 5} 2026-01-10T12:03:51.400890Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7593702456655683358:2106] Handle TEvProposeTransaction 2026-01-10T12:03:51.400907Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7593702456655683358:2106] TxId# 281474976710663 ProcessProposeTransaction 2026-01-10T12:03:51.400937Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [3:7593702456655683358:2106] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [3:7593702460950651053:2344] 2026-01-10T12:03:51.402656Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [3:7593702460950651053:2344] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: 
"\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2ODA4OTgzMSwiaWF0IjoxNzY4MDQ2NjMxLCJzdWIiOiJ1c2VyMiJ9.vXhDQAzzi5LpL-EZ2BCE72-n77VpbL4e3mBJnbb6OUO2FaLOax8gR666n1TKaiIOz9sOND-FQ0q8TCArjs06aiXTJsdUajl2EdAahzvxTV6DsSAT_546ej0c3h6endJPbofw7yx6QaIRrEocyoY6qX5N6qt1pDDs9CEgklzpiupEIuvqYeCWsr-W2DMrqc05W6GaHcmXwEfNB74n252U8gitoVjxqoUM7C9prr86FjebuIt_AL4Hye-gXBH_3PB-lHGGfn_nO4fO8lbRbnjGiDRB5NL0M5ESh4RsyR5eaKe32Rc8ymdpyXW8Ks1buuopLXS344Lvd3tsMr3iu7r70g\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2ODA4OTgzMSwiaWF0IjoxNzY4MDQ2NjMxLCJzdWIiOiJ1c2VyMiJ9.**0\000" PeerName: "" 2026-01-10T12:03:51.402736Z node 3 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [3:7593702460950651053:2344] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:03:51.402757Z node 3 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [3:7593702460950651053:2344] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2026-01-10T12:03:51.402803Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [3:7593702460950651053:2344] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:03:51.402883Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593702456655683365:2109], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:51.402966Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:7593702456655683384:2116][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7593702456655683365:2109], cookie# 10 2026-01-10T12:03:51.403025Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7593702456655683405:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683402:2116], cookie# 10 2026-01-10T12:03:51.403049Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7593702456655683406:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683403:2116], cookie# 10 2026-01-10T12:03:51.403063Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7593702456655683062:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683405:2116], cookie# 10 2026-01-10T12:03:51.403066Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7593702456655683407:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683404:2116], cookie# 10 2026-01-10T12:03:51.403087Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7593702456655683065:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683406:2116], cookie# 10 2026-01-10T12:03:51.403090Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7593702456655683068:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7593702456655683407:2116], cookie# 10 2026-01-10T12:03:51.403123Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7593702456655683405:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683062:2049], cookie# 10 2026-01-10T12:03:51.403143Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7593702456655683406:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683065:2052], cookie# 10 2026-01-10T12:03:51.403160Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7593702456655683407:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683068:2055], cookie# 10 2026-01-10T12:03:51.403200Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7593702456655683384:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683402:2116], cookie# 10 2026-01-10T12:03:51.403221Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:7593702456655683384:2116][/dc-1] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:51.403241Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7593702456655683384:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683403:2116], cookie# 10 2026-01-10T12:03:51.403261Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:7593702456655683384:2116][/dc-1] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:51.403296Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7593702456655683384:2116][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7593702456655683404:2116], cookie# 10 2026-01-10T12:03:51.403301Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593702456655683365:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:51.403312Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7593702456655683384:2116][/dc-1] Sync cookie mismatch: sender# [3:7593702456655683404:2116], cookie# 10, current cookie# 0 2026-01-10T12:03:51.403362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593702456655683365:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7593702456655683384:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768046630896 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:51.403428Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593702456655683365:2109], cacheItem# { Subscriber: { Subscriber: [3:7593702456655683384:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768046630896 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-10T12:03:51.403587Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593702460950651054:2345], recipient# [3:7593702460950651053:2344], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-10T12:03:51.403619Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7593702460950651053:2344] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:51.403670Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7593702460950651053:2344] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-10T12:03:51.403761Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702460950651053:2344] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-10T12:03:51.403787Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7593702460950651053:2344] txid# 281474976710663 SEND to# [3:7593702460950651052:2343] Source {TEvProposeTransactionStatus Status# 5} |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TMLPWriterTests::WriteTwoMessage_OnePartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_TwoPartition >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TPQCDTest::TestUnavailableWithoutClustersList >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] Test command err: 2026-01-10T12:03:39.037585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702407788872564:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:39.037655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:39.482746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:39.482894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:39.487543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:39.644490Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702403493905055:2081] 1768046618988110 != 1768046618988113 2026-01-10T12:03:39.644589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:39.648112Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:39.836554Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702407788872615:2105] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:39.836597Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702407788872897:2263] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:39.836685Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702407788872638:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:39.836769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702407788872806:2208][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702407788872638:2118], cookie# 1 2026-01-10T12:03:39.838299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702407788872851:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872848:2208], cookie# 1 2026-01-10T12:03:39.838345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702407788872852:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872849:2208], cookie# 1 2026-01-10T12:03:39.838362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702407788872853:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872850:2208], cookie# 1 2026-01-10T12:03:39.838408Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702403493905023:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872851:2208], cookie# 1 2026-01-10T12:03:39.838447Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702403493905026:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872852:2208], cookie# 1 2026-01-10T12:03:39.838472Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702403493905029:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702407788872853:2208], cookie# 1 2026-01-10T12:03:39.838513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702407788872851:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702403493905023:2049], cookie# 1 2026-01-10T12:03:39.838542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702407788872852:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702403493905026:2052], cookie# 1 2026-01-10T12:03:39.838559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702407788872853:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702403493905029:2055], cookie# 1 2026-01-10T12:03:39.838594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702407788872806:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702407788872848:2208], cookie# 1 2026-01-10T12:03:39.838616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702407788872806:2208][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:39.838632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702407788872806:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702407788872849:2208], cookie# 1 2026-01-10T12:03:39.838661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702407788872806:2208][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:39.838684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702407788872806:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702407788872850:2208], cookie# 1 2026-01-10T12:03:39.838698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702407788872806:2208][/dc-1] Sync cookie mismatch: sender# [1:7593702407788872850:2208], cookie# 1, current cookie# 0 2026-01-10T12:03:39.838738Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702407788872638:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:39.848644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702407788872638:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702407788872806:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 
0 }, by pathId# nullptr 2026-01-10T12:03:39.848751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702407788872638:2118], cacheItem# { Subscriber: { Subscriber: [1:7593702407788872806:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:39.851307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702407788872898:2264], recipient# [1:7593702407788872897:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:39.851367Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702407788872897:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:39.896033Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593702407788872897:2263] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:39.906065Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593702407788872897:2263] Handle TEvDescribeSchemeResult Forward to# [1:7593702407788872896:2262] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 
MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:39.908114Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593702407788872615:2105] Handle TEvProposeTransaction 2026-01-10T12:03:39.908162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593702407788872615:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:03:39.908236Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593702407788872615:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593702407788872903:2268] 2026-01-10T12:03:39.922886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702407788872638:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615 ... er# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.442139Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593702481003190723:2240] 2026-01-10T12:03:56.442157Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.480266Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7593702459528353929:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:56.480409Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593702459528353929:2107], cacheItem# { Subscriber: { Subscriber: [4:7593702481003190706:2238] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:56.480463Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593702459528353929:2107], cacheItem# { Subscriber: { Subscriber: [4:7593702481003190707:2239] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:56.480586Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593702481003190766:2244], recipient# [4:7593702481003190700:2311], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:56.480998Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7593702481003190700:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:03:56.605912Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593702481003190715:2239] 2026-01-10T12:03:56.606006Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606033Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593702481003190716:2239] 2026-01-10T12:03:56.606058Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606078Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593702481003190717:2239] 2026-01-10T12:03:56.606100Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190707:2239][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606195Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593702481003190721:2240] 2026-01-10T12:03:56.606223Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606242Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# 
[4:7593702481003190722:2240] 2026-01-10T12:03:56.606287Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606314Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593702481003190723:2240] 2026-01-10T12:03:56.606337Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190708:2240][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606429Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593702481003190709:2238] 2026-01-10T12:03:56.606452Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606471Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593702481003190710:2238] 2026-01-10T12:03:56.606495Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.606514Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593702481003190711:2238] 2026-01-10T12:03:56.606536Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593702481003190706:2238][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593702459528353929:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2026-01-10T12:03:45.917502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702434813924065:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:45.917555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:45.970217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:46.464254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:46.523049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:46.523171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:46.554621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:46.730517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:46.766588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:03:46.940260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:47.216473Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702434813924147:2143] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:47.216522Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702443403859227:2462] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:47.216631Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702434813924154:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:47.216716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702439108891671:2286][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702434813924154:2145], cookie# 1 2026-01-10T12:03:47.218214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702439108891692:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891689:2286], cookie# 1 2026-01-10T12:03:47.218261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7593702439108891693:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891690:2286], cookie# 1 2026-01-10T12:03:47.218277Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702439108891694:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891691:2286], cookie# 1 2026-01-10T12:03:47.218313Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702434813923794:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891692:2286], cookie# 1 2026-01-10T12:03:47.218341Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702434813923797:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891693:2286], cookie# 1 2026-01-10T12:03:47.218359Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702434813923800:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702439108891694:2286], cookie# 1 2026-01-10T12:03:47.218418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702439108891692:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702434813923794:2050], cookie# 1 2026-01-10T12:03:47.218448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702439108891693:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702434813923797:2053], cookie# 1 2026-01-10T12:03:47.218467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702439108891694:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702434813923800:2056], cookie# 1 2026-01-10T12:03:47.218506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702439108891671:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702439108891689:2286], cookie# 1 2026-01-10T12:03:47.218533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702439108891671:2286][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:47.218549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702439108891671:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702439108891690:2286], cookie# 1 2026-01-10T12:03:47.218569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702439108891671:2286][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:47.218593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702439108891671:2286][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702439108891691:2286], cookie# 1 2026-01-10T12:03:47.218614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702439108891671:2286][/dc-1] Sync cookie mismatch: sender# [1:7593702439108891691:2286], cookie# 1, current cookie# 0 2026-01-10T12:03:47.218681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702434813924154:2145], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:47.235450Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702434813924154:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702439108891671:2286] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:47.235593Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702434813924154:2145], cacheItem# { Subscriber: { Subscriber: [1:7593702439108891671:2286] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:47.238250Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702443403859228:2463], recipient# [1:7593702443403859227:2462], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:47.238316Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702443403859227:2462] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:47.349369Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593702443403859227:2462] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:47.361314Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593702443403859227:2462] Handle TEvDescribeSchemeResult Forward to# [1:7593702443403859226:2461] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:47.365601Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593702434813924147:2143] Handle TEvProposeTransaction 2026-01-10T12:03:47.365629Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593702434813924147:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:03:47.365682Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593702434813924147:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593702443403859234:2468] 2026-01-10T12:03:47.538995Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593702443403859234:2468] txid# 281474976710657 Bootstrap EvSch ... rkload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593702481486905075:2550] 2026-01-10T12:03:56.547326Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7593702481486905066:2550][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7593702468602002345:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.547349Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593702481486905066:2550][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593702481486905076:2550] 2026-01-10T12:03:56.547374Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593702481486905066:2550][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7593702468602002345:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.547397Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593702481486905083:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702468602002217:2050] 2026-01-10T12:03:56.547456Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593702481486905084:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702468602002220:2053] 2026-01-10T12:03:56.547483Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593702481486905085:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702468602002223:2056] 2026-01-10T12:03:56.547517Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593702481486905067:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702481486905080:2551] 2026-01-10T12:03:56.547545Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593702481486905067:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702481486905081:2551] 2026-01-10T12:03:56.547573Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7593702481486905067:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7593702468602002345:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.547593Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593702481486905067:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593702481486905082:2551] 2026-01-10T12:03:56.547616Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593702481486905067:2551][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7593702468602002345:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:03:56.547638Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002217:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593702481486905077:2550] 2026-01-10T12:03:56.547656Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002217:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593702481486905083:2551] 2026-01-10T12:03:56.547672Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002220:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593702481486905078:2550] 2026-01-10T12:03:56.547687Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002220:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593702481486905084:2551] 2026-01-10T12:03:56.547713Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002223:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593702481486905079:2550] 2026-01-10T12:03:56.547728Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593702468602002223:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[3:7593702481486905085:2551] 2026-01-10T12:03:56.547762Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593702468602002345:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:03:56.547823Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593702468602002345:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593702481486905066:2550] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:56.547884Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593702468602002345:2118], cacheItem# { Subscriber: { Subscriber: [3:7593702481486905066:2550] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:56.547928Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593702468602002345:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-10T12:03:56.547968Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593702468602002345:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593702481486905067:2551] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:56.548016Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593702468602002345:2118], cacheItem# { Subscriber: { Subscriber: [3:7593702481486905067:2551] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:56.548248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593702481486905087:2553], recipient# [3:7593702481486905059:2302], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:56.548488Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593702481486905086:2552], recipient# [3:7593702481486905061:2304], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:57.361528Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593702468602002345:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:57.361839Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593702468602002345:2118], cacheItem# { Subscriber: { Subscriber: [3:7593702472896970421:2537] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:03:57.361947Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593702485781872391:2557], recipient# [3:7593702485781872390:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> 
KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3149, MsgBus: 25175 2026-01-10T12:03:20.484242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702326088036753:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:20.493258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:21.229591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:21.257860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:21.257951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:21.372883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:21.479716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:21.495956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:21.517854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702326088036715:2081] 1768046600424712 != 1768046600424715 2026-01-10T12:03:21.544243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:21.842593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:21.842615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:21.842622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:21.842701Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:23.013892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:23.032690Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:23.132149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.393323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.705903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:23.833455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:25.471921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702326088036753:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:25.488592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:27.844893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702356152809688:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.845009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.852241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702356152809698:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:27.852332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:28.562284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.609089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.650831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.685320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.730859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.781847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.855196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:28.956201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:29.094453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702364742745177:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:29.094542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:29.094553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702364742745182:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:29.094833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702364742745184:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:29.094884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:29.098504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:29.112760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702364742745185:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:29.219420Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702364742745237:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:32.587294Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing k ... Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:49.081493Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:49.081518Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:49.081529Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:49.081618Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:49.088186Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:49.528394Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:49.657458Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:49.670225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:49.679613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:49.844416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.053247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.184241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.507734Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593702445418539243:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:53.507856Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:53.548207Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702466893377460:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.548330Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.548864Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702466893377470:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.548937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.632910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.704115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.755665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.808981Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.857304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.946514Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.023437Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.110155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.230350Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702471188345640:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.230473Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.230853Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702471188345645:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.230898Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702471188345646:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.231055Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.236823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:54.264945Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702471188345649:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:54.344023Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702471188345700:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:57.270651Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593702484073247914:2540], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekwnbrscpmgyy9n098ja5tv. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NGY1M2I1NmItMzU3NmI4NjYtZjg3MDJkMjktYTc3MDJhNjU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:03:57.271484Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593702484073247915:2541], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekwnbrscpmgyy9n098ja5tv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NGY1M2I1NmItMzU3NmI4NjYtZjg3MDJkMjktYTc3MDJhNjU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7593702484073247911:2528], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:03:57.271886Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NGY1M2I1NmItMzU3NmI4NjYtZjg3MDJkMjktYTc3MDJhNjU=, ActorId: [3:7593702479778280586:2528], ActorState: ExecuteState, LegacyTraceId: 01kekwnbrscpmgyy9n098ja5tv, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> THiveTest::TestLocalDisconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut_service/fast/unittest >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] Test command err: Trying to start YDB, gRPC: 4682, MsgBus: 9270 2026-01-10T11:59:34.681625Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701355197904426:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:34.691963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:59:35.117781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:59:35.723176Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:36.245803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:59:36.246237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:59:36.257010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:59:36.257098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:59:36.259570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:59:36.365995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:59:36.367954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593701355197904302:2081] 1768046374563441 != 1768046374563444 2026-01-10T11:59:36.470841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:59:36.470888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:59:36.470901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:59:36.471014Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:59:36.526055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:59:38.114119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:38.633741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:39.736432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701355197904426:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:59:39.736642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:59:39.829579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:41.623543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:42.120649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:47.319572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701411032481101:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:47.319679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:47.320072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701411032481110:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:47.320131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:48.207741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.341412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.428527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.483359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.580967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.722628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:48.868863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:49.359883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:59:49.884686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701419622416602:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:49.884742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:49.886422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701419622416604:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:49.886469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:49.887371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701419622416609:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:59:49.894240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:59:49.908207Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701419622416611:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T11:59:50.048794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593701419622416662:3840] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: ... 5]; 2026-01-10T12:03:43.072826Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:43.300236Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7593702422563609892:2081] 1768046623005053 != 1768046623005056 2026-01-10T12:03:43.320200Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:43.322869Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:43.322995Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:43.324633Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:43.345448Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:43.582697Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:43.589002Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:43.589039Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:43.589055Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:43.589203Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:44.042865Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:45.076453Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:45.101362Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:45.293884Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:45.823638Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:45.966270Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:48.045048Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7593702426858577405:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:48.131904Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:50.589440Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593702456923350157:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.589628Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.590104Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593702456923350166:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.590188Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.747278Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.810120Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.878013Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.965345Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.057411Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.156124Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.359433Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.510118Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.660601Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593702461218318360:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.660768Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.660856Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593702461218318365:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.661260Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593702461218318367:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.661334Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.668985Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:51.693941Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593702461218318368:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:51.793652Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7593702461218318420:3792] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:55.683391Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:03:55.692153Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/federated_query/ut_service/fast/unittest >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TPQCDTest::TestUnavailableWithoutBoth >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableConfig >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> KqpResultSetFormats::ArrowFormat_EmptyBatch >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> KqpResultSetFormats::ArrowFormat_Simple >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> KqpResultSetFormats::DefaultFormat >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |83.4%| [TM] {RESULT} ydb/core/kqp/federated_query/ut_service/fast/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableFollowers >> 
TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2026-01-10T12:03:57.807956Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702485324779737:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:57.808071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:58.215211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:58.251921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:58.252060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:58.348794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:58.357142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:58.364236Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702485324779711:2081] 1768046637806564 != 1768046637806567 2026-01-10T12:03:58.546772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:58.592848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00211c/r3tmp/yandexNLxMXy.tmp 2026-01-10T12:03:58.592875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00211c/r3tmp/yandexNLxMXy.tmp 2026-01-10T12:03:58.593049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00211c/r3tmp/yandexNLxMXy.tmp 2026-01-10T12:03:58.593204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:58.823491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:26487 2026-01-10T12:03:58.924492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:58.940511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:03:58.957892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:04:02.199108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702506799617157:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:02.199250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702506799617171:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:02.199333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:02.207270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702506799617174:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:02.207374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:02.207855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:02.232323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702506799617173:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:04:02.407258Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702506799617240:2593] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:02.755633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:02.763243Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593702506799617248:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:04:02.766456Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzI3MzExMzYtODQwYWQzOGMtNWViNDRmODItZTZmMmE1YmM=, ActorId: [1:7593702506799617155:2326], ActorState: ExecuteState, LegacyTraceId: 01kekwngzmcjb5x67mmdxvt3f8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:04:02.769107Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:04:02.808300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702485324779737:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:02.808370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:02.888468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:03.018177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2026-01-10T12:03:59.781459Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702492560386817:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:59.795473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:00.427952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:00.428149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:00.428071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:00.542420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:00.611241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:00.657360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702492560386762:2081] 1768046639741248 != 1768046639741251 2026-01-10T12:04:00.677800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:00.793733Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:00.944946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00211b/r3tmp/yandexdz9sxg.tmp 2026-01-10T12:04:00.944979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00211b/r3tmp/yandexdz9sxg.tmp 2026-01-10T12:04:00.945131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00211b/r3tmp/yandexdz9sxg.tmp 2026-01-10T12:04:00.945251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> 
TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> DataShardStats::BackupTableStatsReportInterval [GOOD] >> DataShardStats::CollectKeySampleLeader >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2026-01-10T12:04:03.410546Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702512602657967:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:03.410601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:04.084256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:04.084401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:04.084756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:04.087041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:04.206282Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702512602657829:2081] 1768046643346798 != 1768046643346801 2026-01-10T12:04:04.226167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:04.275896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:04:04.283656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:04.283686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:04.283694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:04.283813Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:04.416270Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:07.419546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702529782527993:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:07.419719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:07.420432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702529782528005:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:07.425451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702529782528006:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:07.425525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:07.431980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:07.457195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702529782528009:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-10T12:04:07.647101Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702529782528066:2578] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> KqpScanArrowFormat::AllTypesColumnsCellvec >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 
[GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 >> TMLPWriterTests::WriteTwoMessage_TwoPartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_Deduplicated >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> 
TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 11222, MsgBus: 14818 2026-01-10T11:57:39.789813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700863973176690:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:39.789857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:57:40.439875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:40.524874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:57:41.152439Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:41.873716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:41.873827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:41.875802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:41.888977Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593700863973176667:2081] 1768046259764813 != 1768046259764816 2026-01-10T11:57:41.889121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:57:41.889256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:41.913787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:42.003256Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:57:42.003288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:57:42.003318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:57:42.003408Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:57:42.261858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:57:42.482490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:42.491823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T11:57:44.792348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593700863973176690:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:44.792404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:57:52.596328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700919807752448:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:52.612669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:52.621385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700919807752457:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:57:52.622664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 1873, MsgBus: 20349 2026-01-10T11:58:02.017352Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:02.020054Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593700958542719484:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:02.034574Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:02.094690Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:02.657181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:02.946585Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:03.132911Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:03.133274Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:03.861805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:04.095010Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:04.147704Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593700958542719328:2081] 1768046281652851 != 1768046281652854 2026-01-10T11:58:04.168607Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:04.279918Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:04.280526Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:04.434058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:05.760532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:06.027285Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:58:06.027305Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:58:06.027683Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:58:06.028829Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:06.776747Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593700958542719484:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:06.777145Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:08.586575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:08.609857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T11:58:10.757349Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593700997197425911:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:10.757454Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:10.757738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593700997197425920:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:10.757783Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 25218, MsgBus: 15024 2026-01-10T11:58:11.500977Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593700999607221431:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:11.501019Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migra ... b" '"/Root/seasons" '"Select")) (let $26 '($23 $24 $25)) (let $27 '('('"mode" '"flush"))) (let $28 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($22 '() '0 '0)) (KiEffects) $26 '())) $27 (Void))) (let $29 (DataSink 'result)) (let $30 (ResPull! (Left! $28) $29 (Key) (Nth (Right! $28) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $30 $29) $1 $27)) ) KqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/count_episodes_with_titles"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) '((Bool 'true) (Bool 'true)) (lambda '($6) '((PersistableRepr (Member $6 '"series")) (PersistableRepr (Member $6 '"season")))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (DataSource '"kikimr" '"db")) (let $3 '('"season_id" '"series_id")) (let $4 (AggApply 'count_all (StructType) (lambda '($31) (Void)))) (let $5 (Aggregate (ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/episodes"))) (Void) '())) $3) $3 '('('Count0 $4)) '())) (let $6 '('"season_id" '"series_id" '"title")) (let $7 '('Inner '"episodes" '"series" '('"episodes" '"series_id") '('"series" '"series_id") '())) (let $8 '('"episodes" '"series_id" '"episodes" '"season_id")) (let $9 '('"seasons" '"series_id" '"seasons" '"season_id")) (let $10 '('Inner $7 '"seasons" $8 $9 '())) (let $11 '('"rename" '"episodes.Count0" '"episode_count")) (let $12 '('"rename" '"episodes.season_id" '"")) (let $13 '('"rename" '"episodes.series_id" '"")) (let $14 '('"rename" '"seasons.season_id" '"")) (let $15 '('"rename" '"seasons.series_id" '"")) (let $16 '('"rename" '"seasons.title" '"season")) (let $17 '('"rename" '"series.series_id" '"")) (let $18 '('"rename" '"series.title" '"series")) (let $19 '($11 $12 $13 $14 $15 $16 $17 $18)) (let $20 (EquiJoin '($5 '"episodes") '((ExtractMembers (Right! (KiReadTable! 
world $2 (Key '('table (String '"/Root/series"))) (Void) '())) '('"series_id" '"title")) '"series") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/seasons"))) (Void) '())) $6) '"seasons") $10 $19)) (let $21 (Bool 'true)) (let $22 (Sort $20 '($21 $21) (lambda '($32) '((Member $32 '"series") (Member $32 '"season"))))) (let $23 '('"db" '"/Root/episodes" '"Select")) (let $24 '('"db" '"/Root/series" '"Select")) (let $25 '('"db" '"/Root/seasons" '"Select")) (let $26 '($23 $24 $25)) (let $27 '('('"mode" '"flush"))) (let $28 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($22 '() '0 '0)) (KiEffects) $26 '())) $27 (Void))) (let $29 (DataSink 'result)) (let $30 (ResPull! (Left! $28) $29 (Key) (Nth (Right! $28) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $30 $29) $1 $27)) ) KqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes_with_titles")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes_with_titles" '"VIEW" '() '0)) (return (Commit! 
$2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 19561, MsgBus: 24659 2026-01-10T12:04:01.285823Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:04:01.360247Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:01.570396Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:01.570585Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:01.576000Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7593702501921852141:2081] 1768046641150461 != 1768046641150464 2026-01-10T12:04:01.591829Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:01.601301Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:01.608219Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:01.817130Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:01.817173Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:01.817192Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:01.817351Z node 23 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:02.017966Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:02.240508Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:03.594446Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:10.794234Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7593702540576558709:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.794437Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.794979Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7593702540576558719:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.795085Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.903230Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7593702540576558739:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.903405Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.904342Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7593702540576558744:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.904460Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7593702540576558745:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.904563Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.923303Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:10.950624Z node 23 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7593702540576558748:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:04:11.011555Z node 23 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [23:7593702544871526095:2571] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2026-01-10T12:02:20.301356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:02:20.342219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:02:20.342573Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:02:20.350940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:02:20.351225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:02:20.351463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:02:20.351584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:02:20.351716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:02:20.351821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:02:20.351949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:02:20.352054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:02:20.352210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:02:20.352351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:02:20.352478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:02:20.352621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:02:20.352740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:02:20.453310Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:02:20.453938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:02:20.454003Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:02:20.454398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:20.454595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:02:20.454672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:02:20.454721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:02:20.454865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:02:20.454960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:02:20.455006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:02:20.455039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:02:20.455239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:02:20.455344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:02:20.455398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:02:20.455443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:02:20.455550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:02:20.455642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:02:20.455692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:02:20.455745Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:02:20.455801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:02:20.455842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:02:20.456112Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:02:20.456175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:02:20.456217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:02:20.456251Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:02:20.456564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:02:20.456624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:02:20.456660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:02:20.456794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:02:20.456848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:02:20.456880Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:02:20.456949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:02:20.457028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:02:20.457067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:02:20.457127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:02:20.457166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:02:20.457203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:02:20.457392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:02:20.457461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
icWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:03:52.768149Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:03:52.768232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-10T12:03:52.768321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:03:52.768463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:03:52.768614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.768669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-10T12:03:52.768741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.777911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.778100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.778329Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:52.778448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.631500s; 2026-01-10T12:03:52.778546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:03:52.779297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.779665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-10T12:03:52.779962Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-10T12:03:52.780028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.782843Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[190] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-10T12:03:52.783788Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.784328Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:03:52.800148Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.800248Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-10T12:03:52.800622Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::6e6e0206-ee1c11f0-a3b9527e-a37ec9aa; 2026-01-10T12:03:52.800718Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:03:52.800811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-10T12:03:52.800880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:03:52.800965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:03:52.801036Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:52.801097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:03:52.801196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.630000s; 2026-01-10T12:03:52.801304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=6e6e0206-ee1c11f0-a3b9527e-a37ec9aa;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:03:52.801432Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:6171112:0] 2026-01-10T12:03:52.801505Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:6171112:0] 2026-01-10T12:03:52.801549Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6171112:0] 2026-01-10T12:03:52.801592Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:6171112:0] 2026-01-10T12:03:52.801632Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:3:2:0:6171112:0] 2026-01-10T12:03:52.801675Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2026-01-10T12:03:52.801713Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:6171112:0] 2026-01-10T12:03:52.801751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2026-01-10T12:03:52.801802Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-10T12:03:52.801905Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6171112:0] 2026-01-10T12:03:52.801956Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:6171112:0] 2026-01-10T12:03:52.802015Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6171112:0] 2026-01-10T12:03:52.802063Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:6171112:0] 2026-01-10T12:03:52.802120Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:6171112:0] 2026-01-10T12:03:52.802162Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:6171112:0] 2026-01-10T12:03:52.802208Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2026-01-10T12:03:52.802247Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:6171112:0] 2026-01-10T12:03:52.802290Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:6171112:0] 2026-01-10T12:03:52.802337Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-10T12:03:52.802385Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6171112:0] GC for channel 4 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey >> TReplicaTest::Subscribe >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> KqpQuery::TableSinkWithSubquery [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> 
TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TReplicaTest::SyncVersion [GOOD] >> TVPatchTests::PatchPartPutError |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2026-01-10T12:04:20.262776Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:20.263939Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-10T12:04:20.264003Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-10T12:04:20.264262Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-10T12:04:20.264363Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:20.264534Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2026-01-10T12:04:19.509287Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:19.509360Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:19.509540Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] 
Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:19.509593Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:19.516209Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:19.516419Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-10T12:04:19.516537Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:19.516697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:19.516740Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:04:19.516800Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:19.812730Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-10T12:04:19.812797Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-10T12:04:19.812879Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:20.100245Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:20.100341Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:20.100492Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 76 2026-01-10T12:04:20.100546Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:20.100615Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2026-01-10T12:04:20.100696Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2026-01-10T12:04:20.100750Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:20.100839Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7:2054] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:8:2055], cookie# 1 |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> test_workload.py::TestYdbMixedWorkload::test[column] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2026-01-10T12:04:20.331412Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:20.332434Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:20.332507Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:20.332782Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-10T12:04:20.332845Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:20.333113Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-01-10T12:04:20.333183Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-10T12:04:20.333287Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-10T12:04:20.333499Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2026-01-10T12:04:20.333558Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-10T12:04:20.333651Z node 1 :BS_VDISK_PATCH 
DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:53.648786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:53.648883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:53.648928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:53.648967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:53.649025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:53.649060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:53.649142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:53.649230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:53.650160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:53.650502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:53.748003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:53.748107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:53.774543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:53.774664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:53.774877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:53.789239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:53.789537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:53.790309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:53.790563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:53.792972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:53.793179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:53.794330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:53.794393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:53.794496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:53.794559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:53.794611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:53.794858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:53.961662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.962858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.963923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.964000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:53.964062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2, partId: 0 2026-01-10T12:04:20.102952Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-10T12:04:20.103031Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-10T12:04:20.103094Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.103142Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:04:20.103377Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:04:20.103687Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:04:20.113248Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.113517Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.113618Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.113706Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.113809Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.116372Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.116713Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:20.116760Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:04:20.117138Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:20.117186Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [14:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:04:20.117887Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 
104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:20.117963Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:04:20.118203Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:04:20.118263Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:20.118350Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:04:20.118416Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:20.118486Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:04:20.118570Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:20.118650Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:04:20.118711Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:04:20.119039Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 10 2026-01-10T12:04:20.119121Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-10T12:04:20.119188Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:04:20.120377Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:20.120490Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:20.120555Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:04:20.120639Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:04:20.120725Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-10T12:04:20.120875Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:04:20.124052Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:04:20.124530Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:04:20.124617Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:04:20.125421Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:04:20.125569Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:04:20.125644Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [14:1455:3279] TestWaitNotification: OK eventTxId 104 2026-01-10T12:04:20.126435Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:20.126750Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 376us result status StatusSuccess 2026-01-10T12:04:20.127629Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 21986, MsgBus: 63832 2026-01-10T12:03:45.373566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702434389072889:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:45.397449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:45.463760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:45.832177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:45.853664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:45.853793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:45.987453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:46.033954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:46.048226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:46.268683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:46.268701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:46.268708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-10T12:03:46.268774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:46.373221Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:46.823888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:46.837519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:46.912551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:47.072366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:47.263803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:47.337788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:49.458063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702451568943901:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:49.458219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:49.464329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702451568943911:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:49.464431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.030485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.076938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.118319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.191913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.244491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.346898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.376447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702434389072889:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:50.376554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:50.408573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.488945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.568623Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702455863912083:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.568754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.569513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702455863912088:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.569560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702455863912089:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.569594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:50.573334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:50.592371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702455863912092:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:50.655711Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702455863912143:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19287, MsgBus: 61368 2026-01-10T12:03:53.757348Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exist ... s: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.874359Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.123219Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593702514175872547:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:09.123316Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:09.150259Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.496179Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702535650711153:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.496271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.496548Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702535650711158:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.496619Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593702535650711159:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.496699Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.501094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:09.517440Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593702535650711162:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:04:09.617112Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593702535650711213:3409] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13812, MsgBus: 21611 2026-01-10T12:04:13.476311Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593702552754682570:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:13.476741Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:13.501806Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:13.640575Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:13.644639Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593702552754682540:2081] 1768046653474125 != 1768046653474128 2026-01-10T12:04:13.684269Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:13.684357Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:13.696613Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:13.717797Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:13.844738Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:13.844763Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:13.844773Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:13.844862Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:14.468246Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:14.480325Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:14.486655Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:17.521732Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702569934552606:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.521850Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.522161Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702569934552616:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.522217Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.644681Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:17.762132Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:17.904637Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702569934552785:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.910258Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:17.920021Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702569934552780:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.920168Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.924285Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702569934552815:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.924417Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:17.944599Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702569934552787:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:04:18.020186Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702574229520136:2639] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:18.477002Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702552754682570:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:18.477139Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2026-01-10T12:04:20.210760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:20.210860Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.211012Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:20.211059Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.211162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-10T12:04:20.211201Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.211301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-10T12:04:20.211345Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.211593Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2026-01-10T12:04:20.211676Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:04:20.224999Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, 
DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.225278Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:9:2056], cookie# 0, event size# 103 2026-01-10T12:04:20.225362Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:04:20.225454Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.225625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-10T12:04:20.225754Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-10T12:04:20.286394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-10T12:04:20.286446Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.286517Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-10T12:04:20.286567Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.286638Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-10T12:04:20.286671Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 900, generation# 1 2026-01-10T12:04:20.286739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-10T12:04:20.286769Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 900, generation# 1 2026-01-10T12:04:20.286864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2026-01-10T12:04:20.286901Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:04:20.286952Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId 
[OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.287074Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:13:2060], cookie# 0, event size# 103 2026-01-10T12:04:20.287109Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2026-01-10T12:04:20.287148Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [1:11:2058] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2026-01-10T12:04:20.287216Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.287307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:11:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:14:2061] 2026-01-10T12:04:20.287351Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:11:2058] Subscribe: subscriber# [1:14:2061], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-01-10T12:04:20.287671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-10T12:04:20.287703Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.287756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-10T12:04:20.287797Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.287857Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-10T12:04:20.287882Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.287925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-10T12:04:20.287971Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.288041Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2026-01-10T12:04:20.288067Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] 
Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:04:20.288135Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.288227Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:17:2064], cookie# 0, event size# 103 2026-01-10T12:04:20.288254Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:04:20.288292Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:04:20.288361Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:15:2062] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:18:2065] 2026-01-10T12:04:20.288395Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:15:2062] Subscribe: subscriber# [1:18:2065], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-10T12:04:20.288688Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-10T12:04:20.288727Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:19:2066] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:20.288774Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:19:2066] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-10T12:04:20.288796Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:19:2066] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:20.288850Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Gener ... 
DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-10T12:04:21.541750Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-10T12:04:21.541825Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-10T12:04:21.541892Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-10T12:04:21.541955Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-10T12:04:21.542023Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-10T12:04:21.542049Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-10T12:04:21.542108Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-10T12:04:21.542135Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-10T12:04:21.542223Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 64 2026-01-10T12:04:21.542256Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-10T12:04:21.542281Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-10T12:04:21.542737Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:401:2448], cookie# 0, event size# 130 2026-01-10T12:04:21.542775Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2026-01-10T12:04:21.542805Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:399:2446] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2026-01-10T12:04:21.542896Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:399:2446] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:402:2449] 2026-01-10T12:04:21.542933Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:04:21.542986Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:399:2446] Subscribe: subscriber# [2:402:2449], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey 
{ SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-10T12:04:21.545735Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-10T12:04:21.545807Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-10T12:04:21.545863Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-10T12:04:21.545902Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-10T12:04:21.545964Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-10T12:04:21.545999Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-10T12:04:21.546083Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-10T12:04:21.546117Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-10T12:04:21.546206Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2026-01-10T12:04:21.546242Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-10T12:04:21.546271Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-10T12:04:21.546348Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:405:2452], cookie# 0, event size# 64 2026-01-10T12:04:21.546378Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-10T12:04:21.546450Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:403:2450] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:406:2453] 2026-01-10T12:04:21.546483Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:04:21.546567Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:403:2450] Subscribe: subscriber# [2:406:2453], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 
333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-10T12:04:21.690922Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:21.690991Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-10T12:04:21.691110Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:21.691160Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 800, generation# 1 2026-01-10T12:04:21.691269Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-10T12:04:21.691313Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 900, generation# 1 2026-01-10T12:04:21.691377Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-10T12:04:21.691410Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 900, generation# 1 2026-01-10T12:04:21.691523Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 118 2026-01-10T12:04:21.691564Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2026-01-10T12:04:21.691708Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-10T12:04:21.691842Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:9:2056], cookie# 0, event size# 117 2026-01-10T12:04:21.691888Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2026-01-10T12:04:21.691939Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [3:7:2054] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2026-01-10T12:04:21.691980Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# 
/root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2026-01-10T12:04:21.692043Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-10T12:04:21.692269Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-10T12:04:21.692333Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus |83.5%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2026-01-10T12:04:23.102616Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:23.103663Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-10T12:04:23.103733Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-10T12:04:23.103986Z node 
1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:23.104186Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2026-01-10T12:04:23.104270Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 >> Worker::Basic >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2026-01-10T12:04:27.398778Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:27.399577Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-10T12:04:27.399625Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-10T12:04:27.399813Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-10T12:04:27.399897Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:27.400041Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TVPatchTests::FindingPartsWhenSeveralPartsExist |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |83.6%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.6%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest 
>> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23954, MsgBus: 28768 2026-01-10T12:03:48.810065Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702445634024676:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:48.810138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:49.337460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:49.354108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:49.354244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:49.576722Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702445634024423:2081] 1768046628721399 != 1768046628721402 2026-01-10T12:03:49.589511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:49.598830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:49.608780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:49.804184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:49.896821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:49.896849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:49.896858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:49.897003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:50.890393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:50.908313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:53.391417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861800:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.391550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.391885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861810:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.391936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.684821Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702467108861823:2509] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2026-01-10T12:03:53.718444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861831:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.718571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.719033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861834:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.719094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.733497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.803952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702445634024676:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:53.804017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:53.869172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861929:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.869237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.869634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467108861932:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.869714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.888693Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702467108861945:2582] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } Trying to start YDB, gRPC: 27840, MsgBus: 2768 2026-01-10T12:03:54.907846Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702471282541650:2109];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:54.944741Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:54.976163Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:55.092196Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702471282541581:2081] 1768046634881971 != 1768046634881974 2026-01-10T12:03:55.157083Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:55.165802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:55.165891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:55.189826Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:55.323390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:55.396633Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:55.396660Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:55.396670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:55.396749Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:55.829186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:55.839300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:55.898126Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:58.622844Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [2:7593702488462411652:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:58.622940Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:58.623540Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:75937024884 ... 474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.466232Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.467192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.471831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.473104Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.474047Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.474866Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.481235Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037961;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.481342Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.482308Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.482650Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.483209Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.483517Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } 
NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.484837Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.485704Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.486688Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.487584Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.494884Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.500764Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.502167Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.503612Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:04:24.780408Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:04:24.780437Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> TVPatchTests::FindingPartsWhenError [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2026-01-10T12:04:28.731085Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:28.731926Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:28.731998Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:28.732491Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-10T12:04:28.732834Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 
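The BS_VDISK_PATCH and SCHEME_BOARD_REPLICA traces in this capture all share one record shape: an ISO-8601 timestamp, "node <id>", a ":COMPONENT SEVERITY:" pair, then a free-form message, with many records run together on a single captured line. The following is a minimal stand-alone sketch, not part of the test output above: plain Python standard library only, the script name and the component/severity names are taken from the records shown here or chosen purely for illustration. It splits such a capture back into structured records and counts WARN/ERROR noise per component.

import re
import sys
from collections import Counter

# One regex per record: timestamp, node id, component, severity, then the
# message body, which runs until the next "<timestamp> node " or end of input.
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<severity>\w+): "
    r"(?P<msg>.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node |\Z)",
    re.S,
)

def parse_records(text):
    """Yield (timestamp, node, component, severity, message) tuples."""
    for m in RECORD.finditer(text):
        # Collapse the wrapped message back onto one line for readability.
        yield (m["ts"], int(m["node"]), m["component"],
               m["severity"], " ".join(m["msg"].split()))

if __name__ == "__main__":
    records = list(parse_records(sys.stdin.read()))
    # Count WARN/ERROR records per component to spot the noisiest subsystems,
    # e.g. KQP_WORKLOAD_SERVICE or TX_COLUMNSHARD in the capture above.
    noisy = Counter((r[2], r[3]) for r in records if r[3] in ("WARN", "ERROR"))
    for (component, severity), n in noisy.most_common():
        print(f"{component:25s} {severity:6s} {n}")

Fed the captured output on stdin (for example, python3 summarize_log.py < captured_output.txt, where summarize_log.py is a hypothetical name for this sketch), it prints a per-component tally rather than the raw interleaved stream.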
2026-01-10T12:04:28.732931Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-10T12:04:28.933875Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2026-01-10T12:04:28.948331Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:735} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2026-01-10T12:04:28.948449Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-10T12:04:28.948566Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> TVPatchTests::PatchPartGetError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2026-01-10T12:04:29.320875Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:29.322322Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2026-01-10T12:04:29.322415Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-10T12:04:29.322531Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::DefaultStorageConfig |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpExplain::UpdateConditional+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2026-01-10T12:04:29.695515Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: 
bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:29.696995Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:29.697067Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-10T12:04:29.697176Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-10T12:04:30.023746Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:30.024714Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:30.024789Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:30.024995Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-10T12:04:30.025078Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-10T12:04:30.025188Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2026-01-10T12:04:30.461168Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:30.462628Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:30.462706Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:30.462950Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# 
[1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-10T12:04:30.463013Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:30.463241Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-10T12:04:30.463326Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> TargetDiscoverer::InvalidCredentials |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TargetDiscoverer::Dirs >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24922, MsgBus: 31066 2026-01-10T12:03:48.485124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702446345827770:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:48.486084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:48.524778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:49.057299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:49.107541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:49.107658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:49.123131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:49.247444Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:49.466328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:49.500602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:49.500993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:49.501007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:49.501014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:49.501082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:50.202808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:50.292306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.512554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.736678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.828148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.086648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467820665991:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.086805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.087188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702467820666001:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.087268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.476067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702446345827770:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:53.476177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:53.521632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.576302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.642539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.705378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.747542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.814623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.889400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.943218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.065072Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472115634182:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.065189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.065629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472115634187:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.065629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472115634188:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.065704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.070071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:54.083234Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702472115634191:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:54.192245Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702472115634243:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:56.915396Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593702480705569179:2546], TxId: 281474976715673, task: 1. Ctx: { CheckpointId : . TraceId : 01kekwnbee5njq064bn8p587mw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1 ... " issue_code: 1030 severity: 1 issues { position { row: 3 column: 84 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 84 } severity: 1 issues { position { row: 3 column: 84 } message: "Column \'NonExistentColumn\' does not exist in table \'/Root/KeyValue\'." end_position { row: 3 column: 84 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 9479, MsgBus: 5826 2026-01-10T12:04:20.575524Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593702585627838152:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:20.575739Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:20.598576Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:20.784595Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:20.790179Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:20.790267Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:20.806205Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:20.875648Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:21.035051Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:21.035076Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:21.035087Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:21.035175Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:21.584281Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:21.854638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:21.863640Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:21.878321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:21.965292Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:22.190651Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:22.297770Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.237775Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702607102676323:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:25.237883Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:25.238207Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702607102676333:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:25.238257Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:25.505284Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.559445Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.575750Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702585627838152:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:25.575819Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:25.618315Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.726187Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.788243Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.882337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:25.959971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:26.031528Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:26.136469Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702611397644506:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:26.136579Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:26.136670Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702611397644511:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:26.136891Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702611397644513:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:26.136942Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:26.140335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:26.155323Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702611397644514:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:26.243771Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702611397644568:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [] |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> TSchemeShardTest::DefaultStorageConfig [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TargetDiscoverer::Basic >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] >> TTxDataShardPrefixKMeansScan::BadRequest >> TargetDiscoverer::IndexedTable >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> TargetDiscoverer::Transfer >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> TargetDiscoverer::SystemObjects >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: [1:245:2060] recipient: [1:227:2145] 2026-01-10T12:03:33.734102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:33.734211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:33.734260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:33.734301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:33.734376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:33.734411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:33.734472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:33.734593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:33.735600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:33.735925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:33.863376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:33.863448Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:33.883215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:33.883347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:33.883528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:33.893847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:33.894138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:33.894974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:33.895998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:33.898851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:33.899060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:33.900332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:33.900399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:33.900613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:33.900662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:33.900831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:33.901025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:34.224608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.225687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.225835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.225938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:03:34.226386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.226981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rts: 2/3 2026-01-10T12:04:32.865363Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:2 progress is 2/3 2026-01-10T12:04:32.865401Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2026-01-10T12:04:32.865460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-10T12:04:32.867824Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:32.867869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:04:32.867899Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-10T12:04:32.867986Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1225:3009] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2026-01-10T12:04:32.868140Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [7:251:2161], Recipient [7:1225:3009]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2026-01-10T12:04:32.868186Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-10T12:04:32.868217Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 104 datashard 72075186233409549 state Ready 2026-01-10T12:04:32.868290Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409549 Got TEvSchemaChangedResult from SS at 72075186233409549 2026-01-10T12:04:32.868575Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:251:2161], Recipient [7:251:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:04:32.868624Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:04:32.868689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:32.868731Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:04:32.868805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:04:32.868834Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:04:32.868864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:04:32.868903Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:04:32.868937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:04:32.868968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 
2026-01-10T12:04:32.869037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:976:2796] message: TxId: 104 2026-01-10T12:04:32.869096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:04:32.869140Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:04:32.869181Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:04:32.869321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-10T12:04:32.869366Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-10T12:04:32.869387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-10T12:04:32.869419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-10T12:04:32.869443Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-10T12:04:32.869464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-10T12:04:32.869508Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-10T12:04:32.872104Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:04:32.872274Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:04:32.872382Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:976:2796] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2026-01-10T12:04:32.872590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:04:32.872641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1277:3046] 2026-01-10T12:04:32.872857Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1279:3048], Recipient [7:251:2161]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:32.872904Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:32.872932Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-10T12:04:32.876692Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:950:2104], Recipient [7:251:2161] 2026-01-10T12:04:32.876758Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:04:32.879403Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:32.879819Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:04:32.879878Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:04:32.880135Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:04:32.882663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:32.883026Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2026-01-10T12:04:32.883120Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:04:32.883688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:04:32.883744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:04:32.884178Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1353:3122], Recipient [7:251:2161]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:32.884247Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:32.884293Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:04:32.884483Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received 
event# 271124996, Sender [7:976:2796], Recipient [7:251:2161]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2026-01-10T12:04:32.884525Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:04:32.884700Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:04:32.884828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:04:32.884873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1351:3120] 2026-01-10T12:04:32.885066Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1353:3122], Recipient [7:251:2161]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:32.885108Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:32.885153Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TargetDiscoverer::RetryableError |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |83.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection >> TargetDiscoverer::Negative >> TMLPWriterTests::WriteTwoMessage_Deduplicated [GOOD] >> TMLPWriterTests::Deduplicated_Reboot >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV2 >> TargetDiscoverer::InvalidCredentials [GOOD] >> Worker::Basic [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateDropSolomon >> TargetDiscoverer::Dirs [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:57.837014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:57.837106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:57.837142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:57.837176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:57.837211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:57.837240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:57.837304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:57.837427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:57.838304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:57.838635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:57.966344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:57.967245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:58.010136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:58.010247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:58.010440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:58.018395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:58.018677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:58.019406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-10T12:03:58.019665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:58.022012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:58.022226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:58.023384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:58.023436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:58.023521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:58.023562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:58.023603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:58.023838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:58.273334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.274694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.274823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.274910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:58.275770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... status: StatusAccepted 2026-01-10T12:04:35.887141Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-10T12:04:35.887216Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2026-01-10T12:04:35.887290Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:04:35.887452Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:04:35.906476Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-10T12:04:35.906657Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:820:2789], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: 
StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:04:35.907358Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2026-01-10T12:04:35.907493Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:04:35.907726Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2026-01-10T12:04:35.907769Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2026-01-10T12:04:35.907813Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000043 2026-01-10T12:04:35.908173Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:35.908294Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 73014446191 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:35.908366Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000043 2026-01-10T12:04:35.908458Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-10T12:04:35.917706Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-10T12:04:35.917786Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-10T12:04:35.917894Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:04:35.917934Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:04:35.917982Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:04:35.918017Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
281474976710760 ready parts: 1/1 2026-01-10T12:04:35.918063Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:04:35.918143Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:127:2151] message: TxId: 281474976710760 2026-01-10T12:04:35.918206Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:04:35.918246Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:04:35.918278Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:04:35.918367Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:04:35.927622Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:04:35.927715Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:04:35.927793Z node 17 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710760 2026-01-10T12:04:35.927929Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:820:2789], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:04:35.934665Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-10T12:04:35.934835Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, 
IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:820:2789], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:04:35.934943Z node 17 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:04:35.941123Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-10T12:04:35.941291Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:820:2789], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:04:35.941361Z node 17 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-10T12:04:35.941584Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:04:35.941657Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:909:2867] TestWaitNotification: OK eventTxId 103 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2026-01-10T12:04:31.576554Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702630349605652:2264];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:31.584311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:31.635575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:32.046850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:32.046928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:32.138735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:32.139702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:32.164714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:32.367260Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:32.532389Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:32.642340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:32.642367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:32.642375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:32.642476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:33.088910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:33.101130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:33.585126Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user } } } 2026-01-10T12:04:33.585224Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user }, iteration# 0 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2026-01-10T12:04:26.499448Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702610795335847:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:26.499515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:26.551071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:27.225572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:27.225677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:27.244607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:27.318271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:27.343838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:27.344330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702610795335637:2081] 1768046666427518 != 1768046666427521 2026-01-10T12:04:27.473777Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:27.526426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:27.645064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:27.645090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:27.645098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:27.645235Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:27.933843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:28.115134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:28.237909Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handshake: worker# [1:7593702619385271250:2608] 2026-01-10T12:04:28.238379Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handshake: worker# [1:7593702619385271250:2608] 2026-01-10T12:04:28.239188Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:04:28.239979Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:04:28.240042Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Send handshake: worker# [1:7593702619385271250:2608] 2026-01-10T12:04:28.240149Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-10T12:04:28.240213Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:173: [Worker][1:7593702619385271250:2608] Handshake with writer: sender# [1:7593702619385271252:2608] 2026-01-10T12:04:28.243294Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Create read session: session# [1:7593702619385271255:2487] 2026-01-10T12:04:28.243362Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7593702619385271250:2608] Handle 
NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-10T12:04:28.243383Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7593702619385271250:2608] Handshake with reader: sender# [1:7593702619385271251:2608] 2026-01-10T12:04:28.243426Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:28.260444Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_17689111607033344005_v1 } } 2026-01-10T12:04:28.286481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:30.484202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702627975206031:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.484205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702627975206018:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.484286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702627975206030:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.484301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.487705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702627975206040:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.487811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:30.488822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:30.551557Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702627975206041:2685] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:04:30.562420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702627975206038:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:04:30.562483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702627975206039:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:04:30.613619Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702627975206089:2717] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:30.635739Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702627975206107:2725] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:31.480115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702610795335847:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:31.480265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=t ... pp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:04:35.097880Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-10T12:04:35.085000Z WriteTime: 2026-01-10T12:04:35.089000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-10T12:04:35.097990Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-10T12:04:35.085000Z WriteTime: 2026-01-10T12:04:35.089000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.134253Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-10T12:04:35.085000Z WriteTime: 2026-01-10T12:04:35.089000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.134539Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2026-01-10T12:04:35.134673Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:04:35.134725Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 72075186224037890 } 2026-01-10T12:04:35.134808Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-10T12:04:35.143114Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:04:35.143187Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-10T12:04:35.143237Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2026-01-10T12:04:35.143318Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.143372Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.360834Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-10T12:04:35.340000Z WriteTime: 2026-01-10T12:04:35.344000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-10T12:04:35.360920Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-10T12:04:35.340000Z WriteTime: 2026-01-10T12:04:35.344000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.361012Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-10T12:04:35.340000Z WriteTime: 2026-01-10T12:04:35.344000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.361144Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2026-01-10T12:04:35.361265Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-10T12:04:35.362888Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:04:35.362956Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-10T12:04:35.362991Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-10T12:04:35.363046Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.363099Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.623537Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-10T12:04:35.603000Z WriteTime: 2026-01-10T12:04:35.604000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-10T12:04:35.623616Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-10T12:04:35.603000Z WriteTime: 2026-01-10T12:04:35.604000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.623678Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-10T12:04:35.603000Z WriteTime: 2026-01-10T12:04:35.604000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-10T12:04:35.623774Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2026-01-10T12:04:35.623858Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-10T12:04:35.626096Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7593702649450043281:2608] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:04:35.626169Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 72075186224037890 } 2026-01-10T12:04:35.626205Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7593702619385271252:2608] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2026-01-10T12:04:35.626255Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.626304Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:04:35.788795Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2026-01-10T12:04:35.788844Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7593702619385271251:2608] Leave 2026-01-10T12:04:35.788949Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:267: [Worker][1:7593702619385271250:2608] Reader has gone: sender# [1:7593702619385271251:2608]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2026-01-10T12:04:35.789021Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7593702649450043445:2608] Handshake: worker# [1:7593702619385271250:2608] 2026-01-10T12:04:35.805517Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7593702649450043445:2608] Create read session: session# [1:7593702649450043446:2487] 2026-01-10T12:04:35.805592Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7593702619385271250:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-10T12:04:35.805608Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7593702619385271250:2608] Handshake with reader: sender# [1:7593702649450043445:2608] 2026-01-10T12:04:35.805642Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593702649450043445:2608] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly >> TargetDiscoverer::IndexedTable [GOOD] >> TargetDiscoverer::Transfer [GOOD] >> TargetDiscoverer::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2026-01-10T12:04:32.112614Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702635937519732:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:32.118122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:32.632019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:32.632136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:32.739855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:32.816452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:32.817774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702635937519683:2081] 1768046672020715 != 1768046672020718 2026-01-10T12:04:32.817827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:33.068768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:33.068793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:04:33.068800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:33.068880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:33.102662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:33.110752Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:33.414514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:33.421417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:33.480929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:33.718079Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046673463, tx_id: 1 } } } 2026-01-10T12:04:33.718120Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-10T12:04:33.741562Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046672868, tx_id: 281474976715657 } }, { name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046673477, tx_id: 281474976710658 } }] } } 2026-01-10T12:04:33.741595Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-10T12:04:33.758483Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046673603, tx_id: 281474976710659 } }] } } 2026-01-10T12:04:33.758521Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2026-01-10T12:04:36.797674Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046673603, tx_id: 281474976710659 } } } 
2026-01-10T12:04:36.797699Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2026-01-10T12:04:36.797729Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} >> TargetDiscoverer::RetryableError [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2026-01-10T12:04:33.757867Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702638656266044:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:33.757906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:34.113858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:34.217641Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702638656266019:2081] 1768046673755897 != 1768046673755900 2026-01-10T12:04:34.228975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:34.244107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:34.244192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:34.278950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:34.360167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:34.645946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:34.645965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:34.645972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-10T12:04:34.646067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:34.778097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:35.031236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:35.038061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:04:35.042689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.646665Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046675080, tx_id: 1 } } } 2026-01-10T12:04:35.646688Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-10T12:04:35.663375Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046674324, tx_id: 281474976710657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046675465, tx_id: 281474976715658 } }] } } 2026-01-10T12:04:35.663407Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-10T12:04:38.261824Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046675465, tx_id: 281474976715658 } } } 2026-01-10T12:04:38.261849Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-10T12:04:38.261876Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2026-01-10T12:04:38.261961Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:142: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> KqpExplain::MultiJoinCteLinks [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> DataShardStats::CollectKeySampleLeader [GOOD] >> DataShardStats::CollectKeySampleFollower >> TargetDiscoverer::SystemObjects [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2026-01-10T12:04:34.378786Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702646728353660:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:34.378838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:34.430875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:34.712160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:34.715329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:34.715430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:34.864533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702646728353520:2081] 1768046674346248 != 1768046674346251 2026-01-10T12:04:34.883121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:34.888168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:34.982356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:35.340775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:35.340801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:35.340813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:35.340892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:35.404164Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:36.071600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:36.308671Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1768046676249, tx_id: 281474976710658 } } } 2026-01-10T12:04:36.308705Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2026-01-10T12:04:36.349926Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:168: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:04:36.349965Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:183: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2026-01-10T12:04:36.350005Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:193: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> TargetDiscoverer::Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError [GOOD] Test command err: 2026-01-10T12:04:35.444169Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702650079626324:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:35.444769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:35.546649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:36.084192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:36.154672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:36.154758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:36.269750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:36.280137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:36.283464Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702650079626114:2081] 1768046675414283 != 1768046675414286 2026-01-10T12:04:36.352746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:04:36.440865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:36.760871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:36.760896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:36.760902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:36.761012Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:37.095141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:37.101895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:37.107005Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-0 } } } 2026-01-10T12:04:37.107049Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-0 }, iteration# 0 2026-01-10T12:04:37.107141Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-1 } } } 2026-01-10T12:04:37.107153Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-1 }, iteration# 1 2026-01-10T12:04:37.107185Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-2 } } } 2026-01-10T12:04:37.107196Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-2 }, iteration# 2 2026-01-10T12:04:37.107238Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-3 } } } 2026-01-10T12:04:37.107284Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-3 }, iteration# 3 2026-01-10T12:04:37.107339Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-4 } } } 2026-01-10T12:04:37.107352Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-4 }, iteration# 4 2026-01-10T12:04:37.107386Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-5 } } } 2026-01-10T12:04:37.107397Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-5 }, iteration# 5 |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2026-01-10T12:04:33.592965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702642375104843:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:33.593062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:34.289279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:34.289381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:34.341385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:34.372038Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:34.379049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702642375104645:2081] 1768046673490932 != 1768046673490935 2026-01-10T12:04:34.388880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:34.538635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:34.595241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:34.848648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:34.848674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:34.848680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:34.848752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:35.259260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:35.269810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:04:35.668248Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046675311, tx_id: 1 } } } 2026-01-10T12:04:35.668286Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-10T12:04:35.696476Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046674457, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046675465, tx_id: 281474976710658 } }] } } 2026-01-10T12:04:35.696516Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-10T12:04:38.595697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702642375104843:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:38.596676Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:38.652541Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046675465, tx_id: 281474976710658 } } } 2026-01-10T12:04:38.652569Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-10T12:04:38.652613Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 6559, MsgBus: 7037 2026-01-10T12:03:48.011543Z node 1 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702446862656369:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:48.013769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:48.135439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:49.020369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:49.020499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:49.021044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:49.021162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:49.031224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:49.055936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:49.088206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702442567688840:2081] 1768046627927949 != 1768046627927952 2026-01-10T12:03:49.344150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:49.356741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:49.356759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:49.356765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:49.356840Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:50.251297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:50.262722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:50.411571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.663819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.882500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.032521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.010500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702446862656369:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:53.010576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:53.610381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702468337494500:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.610570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.610885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702468337494510:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:53.610918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.204648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.249589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.311370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.363000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.406550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.456151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.533726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.584652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.699788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472632462679:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.699865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.700105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472632462685:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.700147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702472632462684:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.700221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:54.706350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:54.726514Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702472632462688:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:54.810670Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702472632462739:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPa ... art proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:30.985060Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:31.254048Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:31.364904Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:34.588216Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593702625258374021:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:34.588311Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:34.919398Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702646733212267:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:34.919522Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:34.920053Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702646733212277:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:34.920149Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.064769Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.128942Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.179111Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.220421Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.322987Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.398396Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.487921Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.609827Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:35.795500Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702651028180449:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.795612Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.796145Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702651028180454:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.796201Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702651028180455:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.796539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:35.801824Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:35.823660Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593702651028180458:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:35.900204Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593702651028180509:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"E-Size":"0","PlanNodeId":6,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":6}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":3,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":11,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2026-01-10T12:04:34.773460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702646516435739:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:34.773854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:35.511503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:35.511626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:35.595072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:35.666485Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:35.670454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:35.696226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702646516435611:2081] 1768046674765877 != 1768046674765880 2026-01-10T12:04:35.788381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:35.906235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:36.353203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:36.353224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:36.353230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:36.353315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:36.810594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:36.817674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:36.821106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:37.034971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:04:37.071705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:37.232373Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046676858, tx_id: 1 } } } 2026-01-10T12:04:37.232408Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-10T12:04:37.264316Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046675787, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046676991, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1768046677068, tx_id: 281474976710659 } }] } } 2026-01-10T12:04:37.264352Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-10T12:04:39.772800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702646516435739:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:39.772884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:39.900799Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768046676991, tx_id: 281474976710658 } } } 2026-01-10T12:04:39.900823Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-10T12:04:39.900866Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> THiveTest::TestLockTabletExecution [GOOD] >> 
THiveTest::TestLockTabletExecutionBadOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2026-01-10T12:04:36.336336Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702655380932601:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:36.336390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:36.385870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:36.440581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:36.923765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:36.923955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:36.947021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:37.003223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:37.076700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:37.077886Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702655380932466:2081] 1768046676283427 != 1768046676283430 2026-01-10T12:04:37.344623Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:37.602873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:37.602894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:37.602906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:37.602983Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:38.121271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:38.128973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:38.188903Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-10T12:04:38.188944Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn |83.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::UpdateConditional+UseSink [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |83.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:48.437040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:48.582800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:48.592769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:48.593241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:48.593365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:48.997711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.997858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:49.004325Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046564804215 != 1768046564804218 2026-01-10T12:02:49.019411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:49.080338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:49.246166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:49.368972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:49.431851Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NDRlZTA2YzAtZmRiNTk5NGYtZmJkNGViMDktZjY2ZGY3YTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NDRlZTA2YzAtZmRiNTk5NGYtZmJkNGViMDktZjY2ZGY3YTQ= temp_dir_name# 0d654a04-413a-426c-a748-71afa3410605 trace_id# 2026-01-10T12:02:49.432531Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NDRlZTA2YzAtZmRiNTk5NGYtZmJkNGViMDktZjY2ZGY3YTQ=, ActorId: [1:843:2738], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:02:49.433032Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NDRlZTA2YzAtZmRiNTk5NGYtZmJkNGViMDktZjY2ZGY3YTQ=, ActorId: [1:843:2738], ActorState: ReadyState, LegacyTraceId: 01kekwk9yr4dsga17rdzyt33at, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpc_actor# [0:0:0] database# database_id# /Root pool_id# default trace_id# 2026-01-10T12:02:49.721853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:846:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:49.722021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:49.722461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:49.722545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:02:49.743993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.794798Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:885:2766], Recipient [1:894:2772]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:49.795753Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:885:2766], Recipient [1:894:2772]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:49.796051Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:894:2772] 2026-01-10T12:02:49.797063Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:49.855120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:885:2766], Recipient [1:894:2772]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:49.855873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:49.856008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:49.857792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:49.857885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:49.857950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:49.858309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:49.858451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:49.858551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:909:2772] in generation 1 2026-01-10T12:02:49.859002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:49.913329Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:49.913567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:49.913696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:911:2782] 2026-01-10T12:02:49.913739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:49.913771Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:49.913812Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:49.914017Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2772], Recipient [1:894:2772]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:49.914087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:49.914437Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:49.914535Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:49.914635Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:49.914674Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:49.914712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:49.914745Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:49.914791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:49.914823Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:49.914864Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:49.914983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2773], Recipient [1:894:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:49.915019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:49.915061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2770], serverId# [1:896:2773], sessionId# [0:0:0] 2026-01-10T12:02:49.915408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:896:2773] 2026-01-10T12:02:49.915453Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:49.915558Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:49.915802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:49.915859Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:49.915959Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:02:49.916007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:02:49.916058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:02:49.916114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSch ... 
4:39.534859Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:04:39.534915Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:04:39.535065Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:04:39.535149Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2026-01-10T12:04:39.535193Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-10T12:04:39.535223Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2026-01-10T12:04:39.535267Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:04:39.535311Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:04:39.535348Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-10T12:04:39.535403Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715666] at 72075186224037888 2026-01-10T12:04:39.535438Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-10T12:04:39.535460Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:04:39.535482Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:04:39.535505Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:04:39.535531Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-10T12:04:39.535556Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:04:39.535578Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-10T12:04:39.535602Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-10T12:04:39.535678Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-10T12:04:39.535795Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 
72057594046644480 PathId: 38 HasWrites: false 2026-01-10T12:04:39.535896Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-10T12:04:39.535977Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-10T12:04:39.536007Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-10T12:04:39.536029Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:04:39.536051Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-10T12:04:39.536126Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:04:39.536229Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2026-01-10T12:04:39.536255Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:04:39.536281Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:04:39.536305Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:04:39.536347Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-10T12:04:39.536373Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:04:39.536409Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2026-01-10T12:04:39.536465Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:04:39.536497Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-10T12:04:39.536537Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:04:39.537848Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [13:70:2117], Recipient [13:1105:2926]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2026-01-10T12:04:39.841418Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1170:2970], Recipient [13:1105:2926]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:04:39.841763Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at 
tablet# 72075186224037888, FollowerId 0 2026-01-10T12:04:39.841869Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-10T12:04:39.841957Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v600/18446744073709551615 2026-01-10T12:04:39.842070Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-10T12:04:39.842234Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:04:39.842318Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:04:39.842390Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:04:39.842447Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:04:39.842512Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-10T12:04:39.842582Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:04:39.842614Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:04:39.842640Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:04:39.842668Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:04:39.842820Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:04:39.843153Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1170:2970], 0} after executionsCount# 1 2026-01-10T12:04:39.843254Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1170:2970], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:04:39.843381Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1170:2970], 0} finished in read 2026-01-10T12:04:39.843484Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:04:39.843511Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:04:39.843537Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:04:39.843565Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:04:39.843621Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:04:39.843643Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:04:39.843681Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-10T12:04:39.843754Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:04:39.843923Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:04:39.845695Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1170:2970], Recipient [13:1105:2926]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:04:39.845785Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists >> KqpStats::SysViewClientLost [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable >> KqpStats::SysViewCancelled >> TExternalTableTest::ReadOnlyMode |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> TExternalTableTest::DropTableTwice >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists >> TExternalTableTest::SchemeErrors |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |83.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TExternalTableTest::DropExternalTable >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::CreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7913, MsgBus: 65290 2026-01-10T12:03:46.042285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702440596509565:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:46.042360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:46.085638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:46.616900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:46.617006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:46.709444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:46.714418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:46.714989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:03:46.720481Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7593702436301542078:2081] 1768046625999389 != 1768046625999392 2026-01-10T12:03:46.916502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:03:47.062130Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:47.092645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:47.092666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:47.092672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:47.092772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:48.327879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:48.338812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:03:48.437009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:48.771572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:49.087787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:49.231356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.036554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702440596509565:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:51.036644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:51.641357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702462071347749:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.641490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.642109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702462071347759:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:51.642265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.070256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.113732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.169524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.204104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.238345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.309102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.369826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.480513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.598660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702466366315938:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.598765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.599308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702466366315943:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.599360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702466366315944:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.599694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.603615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:52.616920Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702466366315947:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:03:52.686119Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702466366316000:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:31.878989Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:31.904064Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:31.904104Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:31.904114Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:31.904210Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:32.434812Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:32.468269Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:32.681772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:32.701384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:32.725192Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:32.825598Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:33.059200Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:33.170318Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:36.428577Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593702629904403017:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:36.428687Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:37.973832Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702655674208558:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:37.973939Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:37.974441Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702655674208567:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:37.974491Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.080999Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.134349Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.190657Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.256588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.335294Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.395146Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.480374Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.581417Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:38.713938Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702659969176763:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.714037Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.714345Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702659969176768:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.714387Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702659969176769:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.714686Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:38.719015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:38.734976Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593702659969176772:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:38.802906Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593702659969176823:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Key (-∞, +∞)","Data"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:56.389439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:56.389542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:56.389588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:56.389625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:56.389666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:56.389699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-10T12:03:56.389790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:56.389886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:56.390801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:56.391262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:56.487011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:56.487074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:56.525550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:56.540355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:56.540652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:56.579790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:56.580046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:56.580818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:56.581329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:56.604857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:56.605153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:56.606521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:56.606597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:56.606723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:56.606770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:56.606812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:56.607060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:56.799020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.799972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.800949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:56.801507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
046 ParentPathId: 80 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 85 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:44.175222Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:44.175613Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 423us result status StatusSuccess 2026-01-10T12:04:44.176334Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 
Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 91 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:44.177788Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:44.178082Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 324us result status StatusSuccess 2026-01-10T12:04:44.178627Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 
CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 86 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:44.179855Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:44.180324Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 477us result status StatusSuccess 2026-01-10T12:04:44.180859Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 94 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2026-01-10T11:59:17.481005Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.481090Z node 1 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.481991Z node 1 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-10T11:59:17.482025Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.498756Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.498829Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.499065Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.499127Z node 2 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-10T11:59:17.499155Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.499557Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.516404Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.516512Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.517336Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. 
Status: ERROR 2026-01-10T11:59:17.517489Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.517682Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.517732Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-10T11:59:17.517751Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.518256Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.518284Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-10T11:59:17.518297Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.518406Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.518468Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-10T11:59:17.518483Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.518704Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.518760Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-10T11:59:17.518781Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.519137Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.519179Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-10T11:59:17.519201Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.519372Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.537676Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.537768Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.537960Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "/resource" 2026-01-10T11:59:17.538089Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2026-01-10T11:59:17.538119Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2026-01-10T11:59:17.538895Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.539021Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "resource//resource" 2026-01-10T11:59:17.539071Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 
2026-01-10T11:59:17.539102Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2026-01-10T11:59:17.545168Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.545265Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.545396Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res" 2026-01-10T11:59:17.545491Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.550350Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2026-01-10T11:59:17.550393Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res" 2026-01-10T11:59:17.550422Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res", 42) 2026-01-10T11:59:17.550471Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2026-01-10T11:59:17.556142Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-10T11:59:17.556207Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-10T11:59:17.556442Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res0" 2026-01-10T11:59:17.556531Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.556702Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-10T11:59:17.556731Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res0" 2026-01-10T11:59:17.556767Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res0", 42) 2026-01-10T11:59:17.556847Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2026-01-10T11:59:17.556932Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res1" 2026-01-10T11:59:17.556985Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res1" 2026-01-10T11:59:17.557115Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-10T11:59:17.557136Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res1" 2026-01-10T11:59:17.557158Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res1", 43) 2026-01-10T11:59:17.557186Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: 
[/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-10T11:59:17.557252Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res2" 2026-01-10T11:59:17.557300Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res2" 2026-01-10T11:59:17.557420Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-10T11:59:17.557442Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res2" 2026-01-10T11:59:17.557464Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res2", 44) 2026-01-10T11:59:17.557510Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2026-01-10T11:59:17.557641Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2026-01-10T11:59:17.557666Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2026-01-10T11:59:17.557688Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:662: [/Path/KesusName]: Activate session to "res1". Connected: 1 2026-01-10T11:59:17.558484Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:585: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 25769805828 } }) 2026-01-10T11:59:17.558526Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-10T11:59:17.558698Z node 6 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-10T11:59:17.558719Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-10T11:59:17.558796Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:640: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2026-01-10T11:59:17.558840Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:614: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2026-01-10T11:59:17.559006Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-10T11:59:17.559177Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: ... 2Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.300882Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7593702415008353576:2292]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-10T12:03:42.304437Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-10T12:03:42.304502Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-10T12:03:42.304567Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.308303Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.332199Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-10T12:03:42.332257Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-10T12:03:42.430000Z 2026-01-10T12:03:42.332281Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-10T12:03:42.334245Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-10T12:03:42.334308Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.0003091, QueueWeight: 5 } 2026-01-10T12:03:42.334362Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.335485Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.403819Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7593702415008353576:2292]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-10T12:03:42.406004Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-10T12:03:42.406054Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-10T12:03:42.406116Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.406492Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.430100Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-10T12:03:42.430149Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2026-01-10T12:03:42.530000Z 2026-01-10T12:03:42.430172Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-10T12:03:42.430708Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-10T12:03:42.430748Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0003091001202, QueueWeight: 5 } 2026-01-10T12:03:42.430796Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.430957Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.504400Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7593702415008353576:2292]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-10T12:03:42.508571Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-10T12:03:42.508627Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-10T12:03:42.508705Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9996908999, 2)} }]) 2026-01-10T12:03:42.508980Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.534495Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0.9996908999. FreeBalance: 0.9996908999 2026-01-10T12:03:42.534544Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-10T12:03:42.630000Z 2026-01-10T12:03:42.534569Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-10T12:03:42.534620Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:275: Charge "Resource" for 5. Balance: 0.9996908999. FreeBalance: 0.9996908999. TicksToFullfill: 5.001545978. DurationToFullfillInUs: 500154.5978. TimeToFullfill: 2026-01-10T12:03:42.127932Z. Now: 2026-01-10T12:03:42.531484Z. 
LastAllocated: 2026-01-10T12:03:41.627777Z 2026-01-10T12:03:42.535124Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2026-01-10T12:03:42.535155Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.0003091, QueueWeight: 0 } 2026-01-10T12:03:42.535201Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.535266Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.604095Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-10T12:03:42.604152Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-10T12:03:42.604214Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.603003Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7593702415008353576:2292]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-10T12:03:42.604907Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.631990Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-10T12:03:42.701801Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-10T12:03:42.701858Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-10T12:03:42.701922Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-10T12:03:42.702227Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-10T12:03:42.702273Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-10T12:03:42.701253Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7593702415008353576:2292]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-10T12:03:42.954303Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:42.954342Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:43.966137Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:43.966184Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:44.432989Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[50:7593702406418417831:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:44.433094Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:03:44.544759Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7593702406418417978:2116] 2026-01-10T12:03:44.544798Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7593702406418417978:2116] 2026-01-10T12:03:44.960747Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:44.960787Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:45.528508Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7593702406418417978:2116] 2026-01-10T12:03:45.528543Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7593702406418417978:2116] 2026-01-10T12:03:45.956524Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:45.956559Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:46.976403Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:46.976438Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:47.958090Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:47.958125Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:48.960235Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7593702410713385769:2392] 2026-01-10T12:03:48.960278Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7593702410713385769:2392] 2026-01-10T12:03:49.340202Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [50:7593702406418417815:2094] 2026-01-10T12:03:49.340235Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push 
event to server [50:7593702406418417815:2094] |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |83.8%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/quoter/ut/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TSchemeShardTest::CreateSystemColumn [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TExternalTableTest::ReadOnlyMode [GOOD] >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] |83.9%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalTableTest::SchemeErrors [GOOD] |83.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:45.278210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:45.278301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.278359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:45.278400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:45.278445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:45.278484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:45.278536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.278603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:45.279529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.279828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:45.492064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:45.492341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:45.493384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.522135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:45.522719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:45.522907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:45.547969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:45.548344Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:45.549240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:45.549740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:45.555872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.556101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:45.557407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:45.557494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.557556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:45.557605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:45.557731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:45.558064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:45.927291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.947900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.948948Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.949060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.949217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.949300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... etID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:47.018813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-10T12:04:47.018953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:04:47.019167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:04:47.019269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:47.020896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:04:47.021178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:04:47.022655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:47.022700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:47.022872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:04:47.022971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:04:47.023047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.023116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-10T12:04:47.023156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:04:47.023182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:04:47.023410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.023453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:04:47.023560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:04:47.023615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:47.023658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:04:47.023689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:47.023739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:04:47.023781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:04:47.023821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:04:47.023873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:04:47.023956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:04:47.024000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:04:47.024040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:04:47.024090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:04:47.024138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 
2026-01-10T12:04:47.025148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:47.025238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:47.025286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:04:47.025333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:04:47.025395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:04:47.026103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:47.026192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:04:47.026227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:04:47.026256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:04:47.026289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:47.026377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:04:47.037285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:04:47.037413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:04:47.037679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:04:47.037742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:04:47.038222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:04:47.038316Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:04:47.038358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:792:2782] TestWaitNotification: OK eventTxId 104 2026-01-10T12:04:47.039036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:47.039347Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 280us result status StatusSuccess 2026-01-10T12:04:47.039752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:45.393977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:45.394089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.394133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:45.394169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:45.394212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:45.394243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:45.394292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.394360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:45.395483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.395792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:45.581240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:45.581310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:45.582124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.599420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:45.599878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:45.600386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:45.625725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:45.626062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:45.626808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:45.627298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:45.635577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.635813Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:45.637163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:45.637248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.637310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:45.637384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:45.637498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:45.637741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:45.833454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.834995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:45.835561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:46.909079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:46.909248Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 164us result status StatusSuccess 2026-01-10T12:04:46.909556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-10T12:04:46.909841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-10T12:04:46.909882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-10T12:04:46.909965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-10T12:04:46.909995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-10T12:04:46.910080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-10T12:04:46.910116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-10T12:04:46.910623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:04:46.910815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:04:46.910868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:04:46.910904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:742:2732] 2026-01-10T12:04:46.911066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:04:46.911129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-10T12:04:46.911157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests 
-- TTxNotificationSubscriber for txId 126: satisfy waiter [1:742:2732] 2026-01-10T12:04:46.911253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:04:46.911276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:742:2732] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-10T12:04:46.911836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:46.912022Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 193us result status StatusSuccess 2026-01-10T12:04:46.915163Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-10T12:04:46.925646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" 
Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:46.925998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-10T12:04:46.926102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2026-01-10T12:04:46.926224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:04:46.934552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-10T12:04:46.934792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:45.444673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:45.444824Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.444873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:45.444911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:45.444954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:45.444994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:45.445049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.445112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:45.446037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.446356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:45.628366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:45.628445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:45.629296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.650752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:45.651498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:45.651659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:45.693438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:45.693764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:45.694530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:45.695011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:45.705479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.705698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:45.706978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-10T12:04:45.707060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.707133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:45.707188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:45.707287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:45.707490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.028808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.029967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:04:46.030556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.030638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... schemeshard: 72057594046678944 2026-01-10T12:04:47.626723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-10T12:04:47.626852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:04:47.627576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.627726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.627772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:04:47.627808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-10T12:04:47.627847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-10T12:04:47.629222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.629293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.629317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:04:47.629343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-10T12:04:47.629375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] 
was 2 2026-01-10T12:04:47.629510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-10T12:04:47.644415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-10T12:04:47.644651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:04:47.646051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-10T12:04:47.652914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:47.653061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:47.653142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-10T12:04:47.653288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-10T12:04:47.653489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:04:47.653579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:04:47.654158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-01-10T12:04:47.656727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:47.656793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:47.656988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:04:47.657103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.657155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:886:2844], at 
schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-10T12:04:47.657204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:886:2844], at schemeshard: 72057594046678944, txId: 129, path id: 41 2026-01-10T12:04:47.657287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.657337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-10T12:04:47.657453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:04:47.657491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:04:47.657531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:04:47.657563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:04:47.657595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-10T12:04:47.657634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:04:47.657672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-10T12:04:47.657726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-10T12:04:47.657818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:04:47.657862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-10T12:04:47.657895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:04:47.657934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-10T12:04:47.659100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.659197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.659237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:04:47.659277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 
14 2026-01-10T12:04:47.659344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-10T12:04:47.660419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.660521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:04:47.660553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:04:47.660621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-10T12:04:47.660661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:04:47.660734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-10T12:04:47.664204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-10T12:04:47.664597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |83.9%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:46.794583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:46.794709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.794807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:46.794860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:46.794906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:46.794938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:46.795001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.795089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:46.801026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.801492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:46.996362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:46.996466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:46.997642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:47.019979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:47.020631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:47.020838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:47.034850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:47.035207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:47.036130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:47.036758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:47.041349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.041578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:47.043058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:47.043139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.043196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:47.043248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:47.043372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:47.043624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.204479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.205642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.205827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.205907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.205975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.206917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:04:48.383627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:04:48.383662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:04:48.383688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:04:48.383712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:04:48.384418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:48.384502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:48.384548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:04:48.384602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:04:48.384654Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:04:48.386024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:48.386166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:48.386208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:04:48.386239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:04:48.386276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:04:48.386367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:04:48.388655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:04:48.389970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:04:48.390213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:04:48.390256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:04:48.390686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:04:48.390794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.390835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:706:2696] TestWaitNotification: OK eventTxId 101 2026-01-10T12:04:48.391288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.391514Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 267us result status StatusSuccess 2026-01-10T12:04:48.391945Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:04:48.395747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.396265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-10T12:04:48.396364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-10T12:04:48.396415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 
72057594046678944 2026-01-10T12:04:48.399336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.399667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:04:48.400000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:04:48.400042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:04:48.400502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:04:48.400612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.400650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:714:2704] TestWaitNotification: OK eventTxId 102 2026-01-10T12:04:48.401071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.401248Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 182us result status StatusPathDoesNotExist 2026-01-10T12:04:48.401461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: 
[1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:46.285012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:46.285103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.285140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:46.285192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:46.285237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:46.285281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:46.285348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.285411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:46.286296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.286585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:46.430754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:46.430826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:46.431682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.447960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:46.448118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:46.448298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:46.456467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:46.456935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:46.457663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:46.458130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:46.460995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.461189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:46.462362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:46.462441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.462555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:46.462623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:46.462685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:46.462875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.680389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.681614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.681742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.681810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.681883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.681948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:04:46.682561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.682721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.070632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-10T12:04:48.070753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-10T12:04:48.070844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-10T12:04:48.070884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.070904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.071031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-10T12:04:48.071203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-10T12:04:48.071258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.071287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:04:48.071520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.071540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: 
tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.071594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:04:48.071780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:04:48.071864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.071887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.071950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-10T12:04:48.072047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.072072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.072196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.072227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.072399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-10T12:04:48.072470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.072493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.072602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.072626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.072696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-10T12:04:48.072804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-10T12:04:48.072862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.072883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 
2026-01-10T12:04:48.073036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-01-10T12:04:48.073276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:827:2817] 2026-01-10T12:04:48.073960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.073983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:827:2817] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-01-10T12:04:48.077831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.078054Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 249us result status StatusSuccess 2026-01-10T12:04:48.078402Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:46.595962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:46.596048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.596105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:46.596159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:46.596224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:46.596262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:46.596323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.596441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:46.597411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.597729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:46.739319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:46.739401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:46.740586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.769114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:46.769616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:46.769792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:46.784209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:46.784518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:46.785333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:46.785865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:46.789114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.789319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:46.790654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:46.790722Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.790779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:46.790859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:46.790963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:46.791177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.963478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.964991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.965134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.965226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... tate 2026-01-10T12:04:48.202757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:04:48.202795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:04:48.202836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:04:48.202902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:04:48.202946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:04:48.202988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:04:48.203022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:04:48.203053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:04:48.203134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:04:48.203171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:04:48.203215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:04:48.203256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:04:48.204402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:48.204516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:48.204556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:04:48.204607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 
2026-01-10T12:04:48.204657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:04:48.205733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:48.205832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:48.205892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:04:48.205920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:04:48.205949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:48.206031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:04:48.210618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:04:48.211503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:04:48.211757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:04:48.211799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:04:48.212267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:04:48.212397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.212437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:729:2719] TestWaitNotification: OK eventTxId 102 2026-01-10T12:04:48.213051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.213297Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 276us result status StatusSuccess 2026-01-10T12:04:48.213652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-10T12:04:48.217353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.217742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-10T12:04:48.217847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2026-01-10T12:04:48.218004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: 
EPathTypeExternalTable, at schemeshard: 72057594046678944 2026-01-10T12:04:48.220798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:04:48.221043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:04:48.221332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:04:48.221403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:04:48.221880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:04:48.221967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:04:48.222001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:737:2727] TestWaitNotification: OK eventTxId 103 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> KeyValueGRPCService::SimpleWriteReadOverrunV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:03:56.976553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:56.976700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:56.976757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:56.976793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:56.976830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:56.976880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:56.976946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:56.977037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:56.977914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:56.978240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:57.073253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:57.073320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:57.089842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:57.089956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:57.090133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:57.099594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:57.099902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:57.100612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:57.101147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:57.103519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:57.103718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:57.104925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:57.104988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:57.105091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:57.105140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:57.105183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:57.105409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.112600Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:03:57.275587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:03:57.275865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.276091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:03:57.276147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:03:57.276315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:03:57.276386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:57.279146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:57.279358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:03:57.279576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.279662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:03:57.279701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:03:57.279738Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:03:57.281818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.281885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:03:57.281920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:03:57.284004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.284058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:03:57.284130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:03:57.284202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:03:57.287929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:03:57.289881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:03:57.290103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:03:57.291270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:57.291402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:03:57.291438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:03:57.291784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:03:57.291855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:03:57.292027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:03:57.292156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:03:57.294885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:57.294934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... rget path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:04:46.408851Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:04:46.408932Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCopyTable target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:04:46.409039Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:04:46.409268Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:46.409591Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:04:46.418809Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:04:46.418962Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:04:46.423893Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-01-10T12:04:46.424405Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2026-01-10T12:04:46.424878Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:46.424968Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:46.425295Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:04:46.425451Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.425540Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-10T12:04:46.425630Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:04:46.432323Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.432471Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2026-01-10T12:04:46.432975Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-10T12:04:46.434499Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:04:46.434705Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:04:46.434788Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:04:46.434863Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:04:46.434948Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:04:46.451250Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:04:46.451391Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:04:46.451429Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:04:46.451469Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 1 2026-01-10T12:04:46.451512Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:04:46.451661Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:04:46.459161Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2026-01-10T12:04:46.459447Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2026-01-10T12:04:46.459546Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-10T12:04:46.460145Z node 17 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-10T12:04:46.460498Z node 17 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2026-01-10T12:04:46.460734Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-10T12:04:46.460821Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-10T12:04:46.461027Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-10T12:04:46.461127Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-10T12:04:46.461259Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-10T12:04:46.461430Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 2 -> 3 2026-01-10T12:04:46.463523Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:04:46.478938Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 103 2026-01-10T12:04:46.482895Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.483386Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.483496Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2026-01-10T12:04:46.483616Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2026-01-10T12:04:46.498597Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-10T12:04:46.498801Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-10T12:04:46.498913Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2026-01-10T12:04:46.498948Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:45.575074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:45.575168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.575201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:45.575235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:45.575287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:45.575321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-10T12:04:45.575371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:45.575433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:45.576390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.576667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:45.719277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:45.719366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:45.720359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:45.739099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:45.739599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:45.739789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:45.784489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:45.784802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:45.785538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:45.786050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:45.794109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.794297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:45.795504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:45.795577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:45.795628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:45.795708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:45.795807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:45.795984Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:46.115373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.116935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:46.117652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
hildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.826249Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.826431Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 203us result status StatusSuccess 2026-01-10T12:04:48.826761Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.827572Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.827720Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 172us result status StatusSuccess 2026-01-10T12:04:48.828115Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.828706Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.828892Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 196us result status StatusSuccess 2026-01-10T12:04:48.829185Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.829730Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:48.829929Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 221us result status StatusSuccess 2026-01-10T12:04:48.830260Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 
1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:46.670682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:46.670808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.670859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:46.670900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:46.670950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:46.670987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:46.671042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.671109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:46.680296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.680724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:46.891189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:46.891275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:46.892311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.914152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:46.914645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:46.914822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:46.940176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:46.940492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:46.941392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:46.941857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:46.947195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.947417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:46.948753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:46.948850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:46.948939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:46.948984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:46.949099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:46.949339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.173495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.174703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.174887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.174963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.175952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.176235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
026-01-10T12:04:48.422509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.422877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2026-01-10T12:04:48.429106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.429422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-10T12:04:48.432932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.433356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2026-01-10T12:04:48.433476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.433634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2026-01-10T12:04:48.436028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.436388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-10T12:04:48.439829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.443680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2026-01-10T12:04:48.443853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.444070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2026-01-10T12:04:48.453181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.453485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-10T12:04:48.464213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.464642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2026-01-10T12:04:48.464748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.464901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2026-01-10T12:04:48.476484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.476807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2026-01-10T12:04:48.480792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.481202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2026-01-10T12:04:48.481302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.481523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2026-01-10T12:04:48.501667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.501976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2026-01-10T12:04:48.545122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:48.545568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2026-01-10T12:04:48.545693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] 
TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2026-01-10T12:04:48.545916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-10T12:04:48.557084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:48.557416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AllTypesColumns >> TReplicaTest::Handshake ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2026-01-10T11:59:57.102637Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-10T11:59:58.216064Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T11:59:58.577357Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T11:59:58.988069Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T11:59:59.441715Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T11:59:59.851224Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:05.146374Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:09.142458Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:14.456743Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-10T12:00:15.089496Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:17.247746Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:24.855423Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:34.626723Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:50.836295Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:52.474207Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:53.025904Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:53.553046Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:54.044392Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:54.778030Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:55.543903Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:56.543655Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:57.440887Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:58.417745Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:00:59.342258Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:49.894918Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-10T12:01:50.683104Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:51.391731Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:52.150135Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:53.343406Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:54.714259Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:55.375073Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:56.108518Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:56.786937Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-10T12:01:57.764300Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:58.691931Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:01:59.705772Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:00.754527Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:01.818116Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:02.700853Z node 86 :YQ_CONTROL_PLANE_STORAG ... L_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:06.388356Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:07.495227Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:08.691150Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:09.437649Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:10.343436Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:02:11.071970Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:28.758313Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-10T12:03:32.203139Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:33.441750Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:35.230035Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:36.883503Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:38.163744Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:41.661587Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:46.011494Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:47.018402Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:48.040393Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:49.073189Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:52.976494Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:54.002762Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:56.374608Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:57.341683Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:03:59.435484Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:00.687932Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:01.810253Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:04.216854Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-10T12:04:05.392286Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:06.624310Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:07.710561Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:08.747212Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:09.829984Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:10.889439Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:11.935923Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:12.986059Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:14.076008Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:15.203831Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:16.401007Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-10T12:04:17.518772Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:46.754963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:46.755072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.755115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:46.755150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:46.755195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:46.755224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:46.755292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:46.755367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:46.760419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:46.760756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:47.084569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:47.084637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:47.085484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:47.134204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:47.134672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:47.134830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:04:47.168344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:47.168664Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:47.169403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:47.169891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:47.178065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.178265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:47.180639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:47.180731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.180804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:47.180860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:47.180964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:47.181159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.477204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.490965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.491055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.491156Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.491258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.491415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.491501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 8944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:04:51.023662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:04:51.023778Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:51.023813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:04:51.023874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-10T12:04:51.023907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-10T12:04:51.023947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:04:51.024266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:04:51.024316Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:04:51.024427Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:04:51.024464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:04:51.024506Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:04:51.024543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:04:51.024583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:04:51.024633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:04:51.024672Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:04:51.024704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:04:51.024780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:04:51.024819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:04:51.024862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2026-01-10T12:04:51.024896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:04:51.024928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:04:51.024977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:04:51.026068Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.026153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.026186Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:04:51.026235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:04:51.026274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:04:51.027150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.027224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.027252Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:04:51.027283Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:04:51.027320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:51.028908Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.029000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:04:51.029030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:04:51.029057Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:04:51.029087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:04:51.029157Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:04:51.037745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:04:51.038257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:04:51.038359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:04:51.038584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:04:51.038627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:04:51.038974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:04:51.039069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:04:51.039136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:742:2732] TestWaitNotification: OK eventTxId 101 2026-01-10T12:04:51.039591Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:51.039823Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 264us result status StatusSuccess 2026-01-10T12:04:51.040246Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicaTest::Commit |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_proxy/ut/unittest |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> TReplicaTest::Merge >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] >> TVPatchTests::PatchPartOk >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaTest::Merge [GOOD] >> 
TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:04:47.211728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:04:47.211815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:47.211853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:04:47.211888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:04:47.211954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:04:47.211998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:04:47.212088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:04:47.212177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:04:47.213058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:47.213363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:04:47.653417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:04:47.653523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:47.654342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:04:47.694292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:04:47.694395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:04:47.694794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
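A minimal helper sketch for tallying the rejection reasons that recur in a log like the one above. It is not part of the captured test output: the script, its regexes, and its output format are hypothetical and assume only the exact phrasing visible in this section ("propose status:Status..., reason: ..., at schemeshard" from the schemeshard tests and "No permission ...@as in a given scope" from the control plane proxy tests). Feed the log text on stdin, e.g. by piping the captured log file into the script.

    import re
    import sys
    from collections import Counter

    # Phrasing taken verbatim from the log records shown above (assumption:
    # other runs keep the same wording). Each record is processed per physical
    # line, which is how the reasons appear in this capture.
    SCHEME_ERROR = re.compile(
        r"propose status:Status(\w+), reason: (.+?), at schemeshard")
    NO_PERMISSION = re.compile(
        r"No permission ([\w.]+@as) in a given scope")

    def summarize(log_text):
        """Count schemeshard proposal rejections and permission denials."""
        statuses = Counter()
        permissions = Counter()
        for line in log_text.splitlines():
            for status, reason in SCHEME_ERROR.findall(line):
                statuses[(status, reason)] += 1
            for perm in NO_PERMISSION.findall(line):
                permissions[perm] += 1
        print("schemeshard proposal rejections:")
        for (status, reason), n in statuses.most_common():
            print("  %4d  Status%s: %s" % (n, status, reason))
        print("control plane proxy permission denials:")
        for perm, n in permissions.most_common():
            print("  %4d  %s" % (n, perm))

    if __name__ == "__main__":
        summarize(sys.stdin.read())

On this section alone it would report, for example, StatusSchemeError reasons such as "Cannot set TypeId for column 'RowId', use Type" and "Duplicate column id: 2", and denial counts for permissions like yq.queries.create@as and iam.serviceAccounts.use@as.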
2026-01-10T12:04:47.715265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:04:47.715605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:04:47.716401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:04:47.717009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:04:47.719664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.719830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:04:47.720963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:04:47.721027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:04:47.721123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:04:47.721171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:04:47.721232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:04:47.721437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:04:47.928657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.929983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.930515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.930684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.930834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.931010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.931096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.931189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.931326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:04:47.931405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
rd__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:04:51.178791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:04:51.185384Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:51.185513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:04:51.185544Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:04:51.185576Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:04:51.185610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:04:51.185724Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:04:51.193243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:04:51.193850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:04:51.193940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:04:51.194173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:04:51.194215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:04:51.194592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:04:51.194702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:04:51.194739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:742:2732] TestWaitNotification: OK eventTxId 102 2026-01-10T12:04:51.195241Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2026-01-10T12:04:51.195468Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 277us result status StatusSuccess 2026-01-10T12:04:51.195825Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-10T12:04:51.206612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:51.207020Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-10T12:04:51.207107Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: 
opId# 103:0, path# /MyRoot/ExternalTable 2026-01-10T12:04:51.207266Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:04:51.218315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-10T12:04:51.218597Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:04:51.218950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:04:51.218998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:04:51.219439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:04:51.219549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:04:51.219587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:750:2740] TestWaitNotification: OK eventTxId 103 2026-01-10T12:04:51.220174Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:04:51.220396Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 294us result status StatusSuccess 2026-01-10T12:04:51.220773Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TVPatchTests::PatchPartOk [GOOD] >> TReplicaTest::DoubleDelete [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> THealthCheckTest::Issues100GroupsListing |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> THealthCheckTest::OneIssueListing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2026-01-10T12:04:52.826801Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:52.828155Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-10T12:04:52.828232Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-10T12:04:52.828466Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-10T12:04:52.828551Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send 
NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-10T12:04:52.828818Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-01-10T12:04:52.828891Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-10T12:04:52.828982Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-10T12:04:52.829171Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2026-01-10T12:04:52.829233Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-10T12:04:52.829311Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |83.9%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |83.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2026-01-10T12:04:52.387696Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:52.387767Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:52.694024Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:04:52.694090Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:52.694277Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:52.694325Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:52.701138Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:52.701351Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] 
Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-10T12:04:52.701453Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:52.701585Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-10T12:04:52.701663Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-10T12:04:52.701744Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-10T12:04:53.035198Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:53.035272Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:53.035410Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-10T12:04:53.035451Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-10T12:04:53.035523Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:53.035648Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.035689Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:53.035746Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.035913Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:53.035952Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:04:53.035987Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.036306Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-10T12:04:53.036385Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:53.038022Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:53.038076Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 
|83.9%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> THealthCheckTest::SpecificServerless |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2026-01-10T12:04:52.652279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:52.652367Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:52.652476Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:52.652530Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 1, generation# 1 2026-01-10T12:04:52.652589Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-10T12:04:52.652621Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-10T12:04:52.792469Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-10T12:04:52.792562Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-10T12:04:52.792718Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-10T12:04:52.792883Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:04:52.792929Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:52.793133Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:52.793172Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:52.805664Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:52.805904Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:9:2056] 2026-01-10T12:04:52.806024Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:52.806068Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:04:52.806101Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# 
[OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:52.806197Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:9:2056] 2026-01-10T12:04:53.096601Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:53.096671Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:53.096827Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.096891Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:53.096969Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.097110Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-10T12:04:53.097245Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-10T12:04:53.097374Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.097414Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:53.097473Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.100652Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.100728Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-10T12:04:53.100796Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.100908Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-10T12:04:53.101048Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-10T12:04:53.101127Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 
0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.101269Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:9:2056] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus |84.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |84.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2026-01-10T12:04:52.780644Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-10T12:04:52.780712Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-10T12:04:52.780820Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:04:52.780934Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-10T12:04:52.780972Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:52.781021Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:04:52.781088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:04:52.781115Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:52.781218Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:52.781245Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:52.785790Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:52.786068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:52.786101Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: 
path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:04:52.786137Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.093732Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:04:53.093813Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:53.093956Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-10T12:04:53.093992Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.094057Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:04:53.094200Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.094326Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:53.094385Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.094507Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-10T12:04:53.094540Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:04:53.094571Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.094640Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:9:2056] 2026-01-10T12:04:53.094684Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.094768Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.094815Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:04:53.094877Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:04:53.094963Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:04:53.094995Z node 2 :SCHEME_BOARD_REPLICA 
NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-10T12:04:53.095051Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:04:53.095136Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-10T12:04:53.095188Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-10T12:04:53.447514Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:9:2056] 2026-01-10T12:04:53.447595Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-10T12:04:53.447686Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 1, capabilities# 2026-01-10T12:04:53.447847Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:53.447891Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:04:53.448001Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:04:53.448071Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 1, generation# 1 2026-01-10T12:04:53.448224Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1006: [3:7:2054] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |84.0%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TMLPWriterTests::Deduplicated_Reboot [GOOD] |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |84.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> KikimrIcGateway::TestLoadTableMetadata >> KikimrIcGateway::TestListPath >> KikimrIcGateway::TestDropExternalTable >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:234:2060] recipient: [1:228:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:234:2060] recipient: [1:228:2145] Leader for TabletID 72057594046678944 is [1:245:2156] sender: [1:246:2060] recipient: [1:228:2145] 2026-01-10T12:03:35.040969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:35.041074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:35.041121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:35.041160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:35.041221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:35.041253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:35.041304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:35.041384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:35.042371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:35.042702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:35.140620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:35.140684Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:35.170255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:35.170708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:35.170927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:35.208752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:35.209014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:35.209712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:35.210119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:03:35.212424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:35.212689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:35.214069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:35.214142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:35.214200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:35.214260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:35.214333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:35.214482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:35.811000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.812040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.818951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.819044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.819128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.819229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.819297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:35.819356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:04:55.248700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:04:55.248754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:04:55.249122Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1085:2905], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.249179Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.249217Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:04:55.249375Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:982:2802], Recipient [7:250:2160]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2026-01-10T12:04:55.249419Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:04:55.249494Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:04:55.249616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:04:55.249662Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1083:2903] 2026-01-10T12:04:55.249855Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1085:2905], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.249892Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.249929Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2026-01-10T12:04:55.250379Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:956:2104], Recipient [7:250:2160] 2026-01-10T12:04:55.250433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:04:55.257682Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 956 RawX2: 34359740472 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:55.258071Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:04:55.258228Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: 
[OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:04:55.258508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:04:55.261501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:55.261880Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-10T12:04:55.261959Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2026-01-10T12:04:55.262444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-10T12:04:55.262501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-10T12:04:55.262969Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1091:2911], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.263036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.263080Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:04:55.263192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:982:2802], Recipient [7:250:2160]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2026-01-10T12:04:55.263228Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:04:55.263312Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-10T12:04:55.263424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:04:55.263470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:1089:2909] 2026-01-10T12:04:55.263676Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1091:2911], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.263714Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.263753Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-10T12:04:55.264631Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:956:2104], Recipient [7:250:2160] 2026-01-10T12:04:55.264692Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:04:55.267478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 956 RawX2: 34359740472 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:04:55.267826Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:04:55.267897Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2026-01-10T12:04:55.276353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:04:55.285837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:04:55.286223Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-10T12:04:55.286303Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-10T12:04:55.286804Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-10T12:04:55.286858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-10T12:04:55.287296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1097:2917], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.287363Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:04:55.287412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:04:55.287584Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:982:2802], Recipient [7:250:2160]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2026-01-10T12:04:55.287623Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:04:55.287718Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-10T12:04:55.287842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:04:55.287891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:1095:2915] 2026-01-10T12:04:55.288145Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1097:2917], Recipient [7:250:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.288187Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:04:55.288231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> 
KikimrIcGateway::TestLoadExternalTable >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/mlp/ut/unittest >> TMLPWriterTests::Deduplicated_Reboot [GOOD] Test command err: 2026-01-10T11:57:48.667293Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700900082055381:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:48.673967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:57:49.160876Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:57:49.739145Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:51.147032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:57:51.147137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:57:51.277390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:51.277476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:51.281848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:51.378102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:51.379530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:57:51.379706Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593700900082055354:2081] 1768046268599274 != 1768046268599277 2026-01-10T11:57:51.713718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00370f/r3tmp/yandexPRKVSk.tmp 2026-01-10T11:57:51.713756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00370f/r3tmp/yandexPRKVSk.tmp 2026-01-10T11:57:51.713908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00370f/r3tmp/yandexPRKVSk.tmp 2026-01-10T11:57:51.714006Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:57:51.745642Z INFO: TTestServer started on Port 10686 GrpcPort 12383 2026-01-10T11:57:53.664803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593700900082055381:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:57:54.293047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PQClient connected to localhost:12383 2026-01-10T11:57:54.803033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:56.031678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:06.275480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T11:58:06.275509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:09.399075Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekwadxz1aq017rkxsvqpxzr", Request deadline has expired for 6.189975s seconds 2026-01-10T11:58:09.402522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700990276369676:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:09.403790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:09.404091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700990276369689:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:09.404136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593700990276369690:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:09.404249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:09.408570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:09.419881Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593700990276369693:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T11:58:09.525626Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593700990276369757:2690] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:58:09.741379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:09.770187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:09.829044Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593700990276369765:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:58:09.834705Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzM0ZWYzOTYtYTdiYzM4YzUtMTAyNTdkZWUtZDkyMmMwOTE=, ActorId: [1:7593700990276369674:2350], ActorState: ExecuteState, LegacyTraceId: 01kekwarfqe26t2m9nzw89syyc, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:58:09.840719Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T11:58:09.840755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593700994571337347:2869] === CheckClustersList. Ok 2026-01-10T11:58:15.498911Z :TODO INFO: TTopicSdkTestSetup started 2026-01-10T11:58:15.734161Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T11:58:15.854588Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593701016046174015:2962] connected; active server actors: 1 2026-01-10T11:58:15.857147Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-10T11:58:15.876714Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T11:58:15.878079Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T11:58:15.913379Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T11:58:15.921843Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_184467440737095 ... 01-10T12:04:52.622867Z node 19 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:04:52.660906Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:04:52.660981Z node 19 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037894] doesn't have tx writes info 2026-01-10T12:04:52.698870Z node 19 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037895][] pipe [19:7593702722118795475:2982] connected; active server actors: 1 2026-01-10T12:04:52.712946Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7593702722118795544:2469] 2026-01-10T12:04:52.714877Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 1 [19:7593702722118795544:2469] 2026-01-10T12:04:52.714986Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-10T12:04:52.716179Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-10T12:04:52.716371Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-10T12:04:52.718923Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-10T12:04:52.718976Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-10T12:04:52.719008Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-10T12:04:52.719045Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-10T12:04:52.719071Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 0 vs 0 2026-01-10T12:04:52.719105Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-10T12:04:52.719138Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] 
ProcessEventQueue 2026-01-10T12:04:52.719170Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:52.719209Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:04:52.721505Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037894][Partition][0][StateIdle] Updateing MLP consumer 'mlp-consumer' config 2026-01-10T12:04:52.722134Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-10T12:04:52.722491Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:04:52.749577Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: [[19:7593702722118795563:3030]] Start describe 2026-01-10T12:04:52.752269Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7593702722118795563:3030]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-10T12:04:52.752309Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7593702722118795563:3030]] Start write 2026-01-10T12:04:52.758808Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:386: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPQ::TEvEndOffsetChanged. Offset: 1 2026-01-10T12:04:52.758859Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7593702722118795544:2469] >>>>>> reload PQ tablet 2026-01-10T12:04:52.760572Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2026-01-10T12:04:52.760701Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:763: [72075186224037894][0][MLP][mlp-consumer] Fetched 1 messages 2026-01-10T12:04:52.760727Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-10T12:04:52.760758Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:04:52.760792Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:52.761052Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:644: [72075186224037894][0][MLP][mlp-consumer] Delete old WAL: c0000000000wmlp-consumer|0000000000000000 - c0000000000wmlp-consumer|0000000000000000 2026-01-10T12:04:52.761094Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:654: [72075186224037894][0][MLP][mlp-consumer] Write Snapshot Count: 1 Size: 69 cookie: 3 2026-01-10T12:04:52.761824Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7593702722118795563:3030]] Handle TEvPersQueue::TEvResponse 2026-01-10T12:04:52.764550Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:302: [72075186224037894][0][MLP][mlp-consumer] HandleOnWrite TEvKeyValue::TEvResponse Status: 1 Cookie: 3 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2026-01-10T12:04:52.764596Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:327: 
[72075186224037894][0][MLP][mlp-consumer] TX write finished 2026-01-10T12:04:52.764620Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-10T12:04:52.764653Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:04:52.766152Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:52.766209Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:04:52.766244Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-10T12:04:52.790682Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:04:52.790750Z node 19 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-10T12:04:52.791828Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7593702722118795612:2474] 2026-01-10T12:04:52.798776Z node 19 :PERSQUEUE INFO: partition_init.cpp:1016: [topic1:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:04:52.799307Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 2 [19:7593702722118795612:2474] 2026-01-10T12:04:52.799528Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-10T12:04:52.799963Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-10T12:04:52.801289Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-10T12:04:52.802779Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-10T12:04:52.802875Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:214: [72075186224037894][0][MLP][mlp-consumer] Read snapshot 2026-01-10T12:04:52.802963Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-10T12:04:52.802994Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-10T12:04:52.803020Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-10T12:04:52.803050Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-10T12:04:52.803079Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:04:52.803112Z node 19 :PQ_MLP_CONSUMER DEBUG: 
mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:52.803148Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:04:52.804246Z node 19 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' size 152 2026-01-10T12:04:53.716429Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-10T12:04:53.716483Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:04:53.716518Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:53.716554Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:04:53.764181Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: [[19:7593702726413762948:3070]] Start describe 2026-01-10T12:04:53.764589Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7593702726413762948:3070]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-10T12:04:53.764612Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7593702726413762948:3070]] Start write 2026-01-10T12:04:53.767356Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7593702726413762948:3070]] Handle TEvPersQueue::TEvResponse 2026-01-10T12:04:53.803523Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-10T12:04:53.803578Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:04:53.803614Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:04:53.803651Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/mlp/ut/unittest >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |84.0%| [TM] {RESULT} ydb/core/persqueue/public/mlp/ut/unittest |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> THealthCheckTest::DatabaseDoesNotExist >> KeyValueGRPCService::SimpleWriteReadRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV2 >> THealthCheckTest::Basic >> THealthCheckTest::StaticGroupIssue >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> KikimrIcGateway::TestListPath 
[GOOD] >> KikimrIcGateway::TestDropTable >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |84.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestDropResourcePool >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |84.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] Test command err: Trying 
to start YDB, gRPC: 2733, MsgBus: 27224 2026-01-10T12:04:59.037946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702751365003136:2226];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:59.038007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:59.709966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:59.710103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:59.776296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:59.803765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:59.828058Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702747070035623:2081] 1768046698928015 != 1768046698928018 2026-01-10T12:04:59.863899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:00.048363Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:00.112646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:00.192475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:00.192500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:00.192507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:00.192587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:01.164629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:01.184190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:01.329498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.655323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.854565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.970106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.028818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702751365003136:2226];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:04.028903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:05.962517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702777134808579:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.962624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.962948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702777134808589:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.962986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.882553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.917678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.963233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.030502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.067801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.108230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.173637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.247586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.453082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702785724744081:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.453226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.456880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702785724744087:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.456974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702785724744086:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.457021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.464805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:07.498415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702785724744090:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:07.591075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702785724744141:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> DataShardStats::CollectKeySampleFollower [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:49.408216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:49.583018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:49.603091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:49.603652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:49.603773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:50.106528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:50.106682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.112959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046565862856 != 1768046565862859 2026-01-10T12:02:50.126066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:50.173345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.299534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:50.777914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.792808Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.905914Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:02:50.906025Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:02:50.906155Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:02:51.142394Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:02:51.142545Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:02:51.143354Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:02:51.143477Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:02:51.143981Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:02:51.144469Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:02:51.144689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:02:51.147267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:51.147884Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:02:51.149072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:02:51.149189Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:02:51.203586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:51.204907Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:51.205238Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:02:51.205566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:51.281274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:51.282153Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:51.282285Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:51.288159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:51.288303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:51.288388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:51.288847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:51.289028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:51.289128Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:02:51.289687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:51.415065Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:51.415303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:51.415446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:02:51.415494Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:51.415544Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:51.415585Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:51.415891Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:51.415966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:51.420695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:51.420864Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:51.421002Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:51.421065Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:51.421136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:51.421179Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:51.421216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:51.421257Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:51.421318Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:51.421484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:51.421542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:51.421596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:02:51.421728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:02:51.421809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:51.421973Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:51.422259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:51.422330Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:51.422444Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... 075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-10T12:05:11.924413Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2026-01-10T12:05:11.924456Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:05:11.924535Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:971:2825], Recipient [13:886:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:05:11.924570Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:05:11.924604Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2026-01-10T12:05:11.924655Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:05:11.924865Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1176:2963], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:05:11.925216Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:886:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:05:11.925257Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:05:11.925293Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-10T12:05:11.925352Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 
0} 2026-01-10T12:05:11.925469Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1176:2963], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 2045 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1135 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2026-01-10T12:05:11.926681Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:11.927033Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:05:11.928674Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:11.931334Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:05:11.931562Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:886:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-10T12:05:11.931682Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:05:11.931770Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2026-01-10T12:05:11.938343Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:05:11.938657Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:971:2825], Recipient [13:886:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-10T12:05:11.938759Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:05:11.938849Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-10T12:05:12.432704Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:05:12.432832Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715667 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2026-01-10T12:05:12.459323Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1285:3071], Recipient [13:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:05:12.459828Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:05:12.459960Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v5000/18446744073709551615 ImmediateWriteEdge# v5001/0 ImmediateWriteEdgeReplied# v5001/0 2026-01-10T12:05:12.460049Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5001/18446744073709551615 2026-01-10T12:05:12.460196Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-10T12:05:12.460386Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:05:12.460474Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:05:12.460561Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:12.460630Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:05:12.460699Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:05:12.460773Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:05:12.460803Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:12.460830Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:05:12.460858Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:05:12.461033Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:05:12.461501Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1285:3071], 0} after executionsCount# 1 2026-01-10T12:05:12.461621Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1285:3071], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:05:12.461766Z node 13 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1285:3071], 0} finished in read 2026-01-10T12:05:12.461878Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:05:12.461914Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:05:12.461947Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:05:12.461977Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:05:12.462039Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:05:12.462067Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:05:12.462103Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:05:12.462180Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:05:12.462376Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:05:12.472580Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1285:3071], Recipient [13:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:05:12.472707Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken >> KeyValueGRPCService::SimpleWriteReadRangeV2 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> 
THealthCheckTest::BridgeTwoGroups >> THealthCheckTest::Issues100Groups100VCardListing >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> THealthCheckTest::TestNoSchemeShardResponse >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit95 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] |84.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::CollectKeySampleFollower [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T11:56:58.670614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:56:59.367373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T11:56:59.423766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T11:56:59.426490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:56:59.427528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T11:57:00.345275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:57:00.345452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:57:00.350518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046214589061 != 1768046214589064 2026-01-10T11:57:00.367829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:57:00.413858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:57:00.509865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T11:57:01.160351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:57:01.204408Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:57:01.344935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:57:01.485406Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T11:57:01.498619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T11:57:01.501063Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T11:57:01.504853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:57:01.841271Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:57:01.845663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:57:01.846467Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:57:01.863977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T11:57:01.864113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T11:57:01.864167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T11:57:01.864546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:57:01.864694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:57:01.864789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T11:57:01.875630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:57:01.903325Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T11:57:01.903561Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:57:01.903691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T11:57:01.903730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:57:01.903781Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T11:57:01.903827Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:57:01.904054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T11:57:01.904123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T11:57:01.904493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T11:57:01.904590Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T11:57:01.904683Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:57:01.904739Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:57:01.904800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T11:57:01.904836Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T11:57:01.904880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T11:57:01.904918Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T11:57:01.904965Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T11:57:01.905083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T11:57:01.905123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T11:57:01.905179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T11:57:01.905270Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T11:57:01.905310Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T11:57:01.905412Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T11:57:01.905701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T11:57:01.905759Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T11:57:01.905882Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T11:57:01.905940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T11:57:01.905982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T11:57:01.906024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T11:57:01.906062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T11:57:01.906396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T11:57:01.906433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T11:57:01.906473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T11:57:01.906507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T11:57:01.906562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T11:57:01.906605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T11:57:01.906646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T11:57:01.906693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T11:57:01.906721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T11:57:01.908485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T11:57:01.908549Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:57:01.919712Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 46644480 FollowerId: 1 2026-01-10T12:05:09.777391Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:09.789385Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:09.789477Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:10.690758Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:10.691138Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 38 2026-01-10T12:05:10.695780Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:05:10.695853Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-10T12:05:10.703625Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-10T12:05:10.704284Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-10T12:05:10.705147Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [16:877:2759], Recipient [16:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:05:10.716582Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:10.716680Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:10.716897Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 38 2026-01-10T12:05:11.696435Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 
2026-01-10T12:05:11.696522Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:05:11.696633Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 76000 last cleanup 0 2026-01-10T12:05:11.696704Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:11.696750Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:11.696823Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:11.696895Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:11.697080Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:11.708066Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:11.708161Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:13.987726Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [16:1627:3418], Recipient [16:924:2786]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 KeysSize: 3 2026-01-10T12:05:13.987871Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-10T12:05:13.988114Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-10T12:05:13.988230Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-10T12:05:13.988353Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-10T12:05:13.988531Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:05:13.988613Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:05:13.988694Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:13.988764Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:05:13.988826Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-10T12:05:13.988904Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:05:13.988940Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 
72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:13.988972Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:05:13.989004Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:05:13.989196Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:05:14.004400Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Restart 2026-01-10T12:05:14.004510Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037888 2026-01-10T12:05:14.024546Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-10T12:05:14.024650Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:05:14.024820Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:05:14.025246Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[16:1627:3418], 0} after executionsCount# 2 2026-01-10T12:05:14.025347Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[16:1627:3418], 0} sends rowCount# 3, bytes# 48, quota rows left# 998, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-10T12:05:14.025484Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[16:1627:3418], 0} finished in read 2026-01-10T12:05:14.025615Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:05:14.025659Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:05:14.025696Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:05:14.025735Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:05:14.025800Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:05:14.025832Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:05:14.025878Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-10T12:05:14.025947Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2026-01-10T12:05:14.026127Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:05:14.030371Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [16:1627:3418], Recipient [16:924:2786]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:05:14.030450Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-10T12:05:14.030555Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 42 } }, { items { uint32_value: 44 } }, { items { uint32_value: 46 } } TEST 9: EvGetTableStats(collectKeySample=false) after the collected key sample becomes invalid TEST Sending the EvGetTableStats message to the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 2026-01-10T12:05:14.037491Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [16:1629:3421], Recipient [16:924:2786]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:14.037620Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:14.037740Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037888, clientId# [16:1628:3420], serverId# [16:1629:3421], sessionId# [0:0:0] 2026-01-10T12:05:14.038042Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553160, Sender [16:829:2724], Recipient [16:924:2786]: NKikimrTxDataShard.TEvGetTableStats TableId: 38 CollectKeySample: false TEST Received the TEvGetTableStatsResult response from the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 DatashardId: 72075186224037888 TableLocalId: 38 TableStats { InMemSize: 0 LastAccessTime: 80503 LastUpdateTime: 0 } TabletMetrics { CPU: 168 } FullStatsReady: false TableOwnerId: 72057594046644480 FollowerId: 1 |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_stats/unittest |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> THealthCheckTest::ShardsLimit999 >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues >> KqpResultSetFormats::ArrowFormat_Types_List_3 >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THealthCheckTest::Issues100VCardListing [GOOD] >> 
test_sql_streaming.py::test[hop-GroupByHop-default.txt] [FAIL] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [FAIL] >> THealthCheckTest::Issues100GroupsMerging >> THiveTest::TestLockTabletExecutionBadUnlock >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] >> THealthCheckTest::BridgeGroupDegradedInBothPiles >> KikimrIcGateway::TestAlterResourcePool >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestDropStreamingQuery |84.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest |84.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 8707, MsgBus: 10407 2026-01-10T12:04:57.157164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702745579413461:2210];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:57.157394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:57.218873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:57.592176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:57.618860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:57.618971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:57.682912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:57.713615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:57.716238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702745579413275:2081] 1768046697097093 != 1768046697097096 2026-01-10T12:04:57.866769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:57.958238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:57.958259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:57.958266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:57.958344Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:58.172265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:59.054803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:59.093790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:04:59.115433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-10T12:04:59.174574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-10T12:04:59.188652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21073, MsgBus: 29229 2026-01-10T12:05:02.601674Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702764977721319:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:02.601714Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:02.793504Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702764977721296:2081] 1768046702599174 != 1768046702599177 2026-01-10T12:05:02.793670Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:02.864949Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:02.872738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:05:02.872840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:02.905016Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:03.080227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:03.138889Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:03.138910Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:03.138922Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:03.138991Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:03.660462Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:04.036119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:04.079592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 4512, MsgBus: 9123 2026-01-10T12:05:11.377186Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593702802028734861:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:11.377338Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:11.472168Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:11.524518Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:11.526467Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:11.526535Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:11.562591Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:11.656646Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:11.680605Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:11.680630Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:11.680637Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:11.680700Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:12.408331Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:12.441879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:12.472811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 9110, MsgBus: 26158 2026-01-10T12:05:16.200694Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:16.216308Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:16.395521Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:16.400367Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593702824336098527:2081] 1768046716087449 != 1768046716087452 2026-01-10T12:05:16.446514Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:16.446629Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:16.485020Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:16.508162Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:16.700116Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:16.700135Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:16.700142Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:16.700235Z node 4 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:17.170169Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:17.618581Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:17.627931Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:17.712509Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:21.450494Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702845810936008:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.450496Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702845810935985:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.450573Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702845810936007:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.450648Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.455921Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:21.456266Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702845810936022:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.456335Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:21.470056Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702845810936023:2572] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:05:21.482608Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702845810936021:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:05:21.482700Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702845810936020:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:05:21.576229Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702845810936073:2606] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:21.576883Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702845810936074:2607] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:23.095806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:05:24.393535Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:24.563443Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone >> TStorageTenantTest::Empty [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] |84.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> KeyValueGRPCService::SimpleWriteListRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV2 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive >> TRtmrTest::CreateWithoutTimeCastBuckets >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> KikimrIcGateway::TestAlterResourcePool [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |84.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:48.852272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:48.973239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:48.982008Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:48.982453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:48.982561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:49.459812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:49.459961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:49.469539Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046564670334 != 1768046564670337 2026-01-10T12:02:49.485114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:49.554968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:49.703166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:50.169004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.189444Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.301307Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:02:50.301385Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:02:50.301488Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:02:50.455594Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:02:50.455680Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:02:50.456349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:02:50.456461Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:02:50.456771Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:02:50.456885Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:02:50.457022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:02:50.458903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.459385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:02:50.460235Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:02:50.460319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:02:50.493867Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:50.494893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:50.495153Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:02:50.495410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:50.557962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:50.559343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:50.559469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:50.561252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:50.561332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:50.561385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:50.561803Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:50.561964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:50.562224Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:02:50.562798Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:50.658588Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:50.658800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:50.658924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:02:50.658963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:50.659001Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:50.659051Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:50.659288Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:50.659345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:50.659662Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:50.659764Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:50.659884Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:50.659930Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:50.659985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:50.660024Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:50.660061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:50.663697Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:50.663821Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:50.663997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:50.664051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:50.664137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:02:50.664253Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:02:50.664341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:50.664459Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:50.664739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:50.664805Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:50.664939Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... node 13 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2026-01-10T12:05:31.260616Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1267:3069]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:05:32.677481Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:05:32.677604Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715671 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-10T12:05:32.682582Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1842:3547], Recipient [13:994:2849]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-10T12:05:32.682856Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:05:32.682958Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:05:32.683053Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-10T12:05:32.683174Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-10T12:05:32.683403Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:05:32.683494Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:05:32.683570Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:32.683636Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:05:32.683706Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-10T12:05:32.683785Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:05:32.683817Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:32.683846Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:05:32.683876Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:05:32.684036Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:05:32.684811Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[13:1842:3547], 0} after executionsCount# 1 2026-01-10T12:05:32.684939Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[13:1842:3547], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-10T12:05:32.685098Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[13:1842:3547], 0} finished in read 2026-01-10T12:05:32.685229Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:05:32.685261Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:05:32.685293Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:05:32.685326Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:05:32.685390Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:05:32.685417Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:05:32.685449Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-10T12:05:32.685529Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:05:32.685737Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:05:32.687992Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1842:3547], Recipient [13:994:2849]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:05:32.688676Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2026-01-10T12:05:33.637483Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:05:33.637625Z node 13 
:TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715672 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-10T12:05:33.642839Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1872:3573], Recipient [13:1267:3069]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-10T12:05:33.643088Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-10T12:05:33.643185Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:05:33.643296Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-10T12:05:33.643430Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2026-01-10T12:05:33.643645Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-10T12:05:33.643723Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2026-01-10T12:05:33.643795Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:33.643859Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-10T12:05:33.643928Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2026-01-10T12:05:33.644031Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-10T12:05:33.644090Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:33.644125Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2026-01-10T12:05:33.644155Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2026-01-10T12:05:33.644348Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:05:33.644875Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[13:1872:3573], 0} after executionsCount# 1 2026-01-10T12:05:33.645001Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[13:1872:3573], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-10T12:05:33.645173Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[13:1872:3573], 0} finished in read 2026-01-10T12:05:33.645312Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-10T12:05:33.645351Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2026-01-10T12:05:33.645387Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2026-01-10T12:05:33.645421Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2026-01-10T12:05:33.645482Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-10T12:05:33.645510Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2026-01-10T12:05:33.645575Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037891 has finished 2026-01-10T12:05:33.645665Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-10T12:05:33.645879Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2026-01-10T12:05:33.648255Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1872:3573], Recipient [13:1267:3069]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:05:33.648363Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:48.524694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:48.679431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:48.697478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:48.698011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:48.698134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:49.182914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:49.183077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:49.190101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046564772124 != 1768046564772127 2026-01-10T12:02:49.206810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:49.258509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:49.390980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:49.736053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:49.757968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:49.869712Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:02:49.869825Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:02:49.869931Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:02:50.117843Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:02:50.117936Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:02:50.118608Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:02:50.118714Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:02:50.119076Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:02:50.119210Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:02:50.119356Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:02:50.121485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:50.121877Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:02:50.122648Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:02:50.122721Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:02:50.163891Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:50.164940Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:50.165215Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:02:50.165466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:50.228689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:50.229542Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:50.229652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:50.231330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:50.231406Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:50.231476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:50.231866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:50.232028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:50.232297Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:02:50.232903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:50.298678Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:50.298869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:50.298960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:02:50.299004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:50.299039Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:50.299075Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:50.299334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:50.299406Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:50.299698Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:50.299798Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:50.299897Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:50.299947Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:50.300005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:50.300041Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:50.300097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:50.300164Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:50.300213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:50.300330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:50.300370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:50.302640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:02:50.302845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:02:50.302892Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:50.303024Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:50.303270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:50.303325Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:50.303434Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [14:1197:2877] TxId: 281474976715665. Ctx: { TraceId: 01kekwr9vfa029tadfh676rcj4, Database: , SessionId: ydb://session/3?node_id=14&id=YWIwNDJiMzUtNzFiMTc1ZmEtMzNhZjE4ZmEtYTY1ZjE4Nzc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-10T12:05:33.439517Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=YWIwNDJiMzUtNzFiMTc1ZmEtMzNhZjE4ZmEtYTY1ZjE4Nzc=, ActorId: [14:1046:2877], ActorState: ExecuteState, LegacyTraceId: 01kekwr9vfa029tadfh676rcj4, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/table`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-10T12:05:33.440724Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [14:1067:2877], Recipient [14:894:2772]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } Op: Rollback } 2026-01-10T12:05:33.440775Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-10T12:05:33.440873Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-10T12:05:33.440922Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... waiting for blocked lock status ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 ... 
waiting for blocked lock status (done) 2026-01-10T12:05:33.443288Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [14:828:2723], Recipient [14:894:2772]: NActors::TEvents::TEvPoison 2026-01-10T12:05:33.448286Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-01-10T12:05:33.448445Z node 14 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:05:33.481528Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [14:1202:3007], Recipient [14:1204:3008]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:33.483927Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [14:1202:3007], Recipient [14:1204:3008]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:33.489157Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [14:1202:3007], Recipient [14:1204:3008]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:33.507117Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1204:3008] 2026-01-10T12:05:33.507579Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:33.523035Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:33.529140Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:33.536780Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:33.536962Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:33.537083Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:33.537861Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:33.538235Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:33.538342Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:33.538439Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2026-01-10T12:05:33.538639Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:05:33.538719Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:05:33.538887Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [14:1218:3015] 2026-01-10T12:05:33.538956Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:33.539023Z node 14 :TX_DATASHARD INFO: datashard.cpp:1292: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2026-01-10T12:05:33.539087Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-01-10T12:05:33.539519Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [14:70:2117], Recipient [14:1204:3008]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2026-01-10T12:05:33.539962Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [14:1204:3008], Recipient [14:1204:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:33.540018Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:33.544430Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435075, Sender [14:1204:3008], Recipient [14:1204:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2026-01-10T12:05:33.544503Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3181: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2026-01-10T12:05:33.545088Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1204:3008]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 700} 2026-01-10T12:05:33.545152Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T12:05:33.545238Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 700 2026-01-10T12:05:33.545324Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:33.546989Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:33.547079Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2026-01-10T12:05:33.547315Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-10T12:05:33.547397Z node 14 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-10T12:05:33.547482Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-10T12:05:33.552988Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1204:3008]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 500 NextReadStep# 700 ReadStep# 700 } 2026-01-10T12:05:33.553086Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-10T12:05:33.553174Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 500 next step 700 2026-01-10T12:05:33.553484Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [14:1204:3008], Recipient [14:1104:2921]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-10T12:05:33.553533Z node 14 
:TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:05:33.553619Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-10T12:05:33.553779Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-10T12:05:33.553869Z node 14 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 600:281474976715663 at 72075186224037889 2026-01-10T12:05:33.553963Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:05:33.554063Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-10T12:05:33.554281Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:05:33.554455Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [14:1104:2921], Recipient [14:1204:3008]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-10T12:05:33.554512Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:05:33.554603Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-10T12:05:33.554728Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:05:33.716308Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:05:36.984691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:05:36.984791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:05:36.984824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:05:36.984861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:05:36.984900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:05:36.984926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:05:36.984978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:05:36.985092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:05:36.985954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:05:36.986246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:05:37.083077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:37.083150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:37.099803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:05:37.099936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:05:37.100188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:05:37.119518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:05:37.119802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:05:37.120598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:05:37.120882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:05:37.132312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:05:37.132574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:05:37.134054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:05:37.134133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:05:37.134217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:05:37.134272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:37.134392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:05:37.134726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:05:37.309364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.310554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.310705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.310817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.310911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:05:37.311847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:05:38.064400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-10T12:05:38.064538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-10T12:05:38.064901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:05:38.065041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:05:38.065099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2026-01-10T12:05:38.065208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-10T12:05:38.065409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:05:38.065482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 FAKE_COORDINATOR: Erasing txId 100 2026-01-10T12:05:38.067790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:05:38.067826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:38.067982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:05:38.068064Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:05:38.068110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:05:38.068160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-10T12:05:38.068395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:05:38.068443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:05:38.068558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:05:38.068591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:05:38.068628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:05:38.068659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:05:38.068692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:05:38.068729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:05:38.068766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:05:38.068797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:05:38.068868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:05:38.068908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-10T12:05:38.068939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:05:38.068978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:05:38.069773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:05:38.069870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:05:38.069909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:05:38.069947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:05:38.069988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:05:38.070713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:05:38.070795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:05:38.070825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:05:38.070848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:05:38.070867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:05:38.070912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-10T12:05:38.073814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:05:38.074630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-10T12:05:38.074850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:05:38.074915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-10T12:05:38.075304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:05:38.075382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:05:38.075408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:722:2712] TestWaitNotification: OK eventTxId 100 2026-01-10T12:05:38.075766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
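The describe flow above ends with the schemeshard returning a StatusSuccess description for /MyRoot/rtmr1. For reference, a minimal sketch of requesting the same kind of path description from the client side with the YDB Python SDK follows; the endpoint, database and path are assumed placeholders rather than values from this test run, and describe_path is only one of several ways to obtain such a description.

# Hedged sketch (assumed endpoint/database/path): client-side counterpart of the
# TTxDescribeScheme flow above, using the YDB Python SDK scheme client.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)  # wait until the driver has discovered the cluster

entry = driver.scheme_client.describe_path("/Root/rtmr1")
print(entry.name, entry.type)  # scheme entry name and its entry type

driver.stop()
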
2026-01-10T12:05:38.075911Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 158us result status StatusSuccess 2026-01-10T12:05:38.076196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } RtmrVolumeDescription { Name: "rtmr1" PathId: 38 PartitionsCount: 0 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates >> KqpBatchDelete::Large_1 >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:47.639340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:47.814728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:47.827804Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:47.829541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:47.829696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:48.222166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:48.222316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:48.230048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046563890552 != 1768046563890555 2026-01-10T12:02:48.244578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:48.293012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:48.398918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:48.852293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:48.869622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:48.988817Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:02:48.988899Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:02:48.989019Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:02:49.158705Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:02:49.158810Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:02:49.159492Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:02:49.159625Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:02:49.159975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:02:49.160123Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:02:49.160265Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:02:49.162363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:49.162855Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:02:49.163612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:02:49.163689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:02:49.201766Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:49.205011Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:49.205350Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:02:49.205626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:49.261233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:49.261917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:49.262082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:49.263699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:49.263779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:49.263876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:49.264252Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:49.264398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:49.264484Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:02:49.264976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:49.330560Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:49.330757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:49.330857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:02:49.330897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:49.330947Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:49.330995Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:49.331249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:49.331307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:49.331611Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:49.331704Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:49.331824Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:49.331866Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:49.331905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:49.331957Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:49.332006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:49.332043Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:49.332109Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:49.332202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:49.332236Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:49.332274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:02:49.332356Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:02:49.332428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:49.332538Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:49.332774Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:49.332823Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:49.332940Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ode 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:05:36.275492Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:05:36.275575Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:05:36.275607Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:05:36.275653Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-10T12:05:36.275784Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:05:36.275861Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:05:36.275953Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2026-01-10T12:05:36.276217Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-10T12:05:36.276352Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:36.276746Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [16:1106:2875], Table: `/Root/table` ([72057594046644480:38:1]), SessionActorId: [16:1045:2875]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:1106:2875].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-10T12:05:36.277007Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [16:1100:2875], SessionActorId: [16:1045:2875], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:1045:2875]. 2026-01-10T12:05:36.277641Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=16&id=ZmJmM2VhYmUtZDM3YjUzYTgtYmIyNDMyYjktYmQwYzFlYzU=, ActorId: [16:1045:2875], ActorState: ExecuteState, LegacyTraceId: 01kekwrcgq1etrk021nn2c4kq6, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:1101:2875] from: [16:1100:2875] trace_id# 2026-01-10T12:05:36.277942Z node 16 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [16:1101:2875] TxId: 281474976715663. Ctx: { TraceId: 01kekwrcgq1etrk021nn2c4kq6, Database: , SessionId: ydb://session/3?node_id=16&id=ZmJmM2VhYmUtZDM3YjUzYTgtYmIyNDMyYjktYmQwYzFlYzU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-10T12:05:36.278596Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=16&id=ZmJmM2VhYmUtZDM3YjUzYTgtYmIyNDMyYjktYmQwYzFlYzU=, ActorId: [16:1045:2875], ActorState: ExecuteState, LegacyTraceId: 01kekwrcgq1etrk021nn2c4kq6, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:05:36.280227Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [16:1100:2875], Recipient [16:894:2773]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-10T12:05:36.280286Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-10T12:05:36.280471Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [16:894:2773], Recipient [16:894:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:05:36.280512Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:05:36.280610Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-10T12:05:36.280790Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-10T12:05:36.280933Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:05:36.281048Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-10T12:05:36.281098Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:05:36.281132Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:05:36.281186Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:36.281234Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:05:36.281313Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v501/0 ImmediateWriteEdgeReplied# v501/0 2026-01-10T12:05:36.281400Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-10T12:05:36.281439Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 
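The ABORTED result above, with "Transaction locks invalidated" (issue code 2001) followed by a lock rollback, is the client-visible outcome of a broken optimistic lock and is normally handled by retrying the whole transaction. A minimal sketch of such a retry with the YDB Python SDK follows, assuming a locally running database; the endpoint, database, table name and query are placeholders, not taken from this test.

# Hedged sketch (assumed endpoint/database/table/query): retrying a transaction
# that fails with ABORTED "Transaction locks invalidated" (issue code 2001).
import ydb

def update_row(session):
    # Serializable read-write transaction; a concurrent writer may invalidate the
    # optimistic lock, which surfaces as the ABORTED status seen in the test output.
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute("UPSERT INTO `table` (key, value) VALUES (1u, 42u);", commit_tx=True)

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
# retry_operation_sync re-runs the callee on retryable statuses such as ABORTED,
# so a broken lock leads to a retry instead of an error surfacing to the caller.
pool.retry_operation_sync(update_row)
pool.stop()
driver.stop()
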
2026-01-10T12:05:36.281470Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:36.281496Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:05:36.281524Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:05:36.281557Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:05:36.281585Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:05:36.281611Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:05:36.281638Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:05:36.281676Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-10T12:05:36.281801Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-10T12:05:36.281888Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-10T12:05:36.282004Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:05:36.282139Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:05:36.282186Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:05:36.282272Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:05:36.282349Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:05:36.282409Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-10T12:05:36.282445Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:05:36.282479Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:05:36.282511Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:05:36.282568Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:05:36.282601Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:05:36.282631Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-10T12:05:36.282701Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: 
TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:05:36.282736Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:05:36.282775Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:05:36.282850Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:36.286058Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [16:70:2117], Recipient [16:894:2773]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2026-01-10T12:05:36.290284Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [16:1114:2925], Recipient [16:894:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:36.290409Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:36.290509Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [16:1113:2924], serverId# [16:1114:2925], sessionId# [0:0:0] 2026-01-10T12:05:36.290760Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [16:829:2724], Recipient [16:894:2773]: NKikimr::TEvDataShard::TEvGetOpenTxs |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TCacheTest::MigrationCommon >> TCacheTest::RacyRecreateAndSync >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TCacheTest::Navigate >> TCacheTest::Attributes >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> TCacheTest::MigrationLostMessage >> KikimrIcGateway::TestDropStreamingQuery [GOOD] >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TCacheTest::PathBelongsToDomain [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TCacheTest::Recreate >> THealthCheckTest::BridgeGroupDegradedInBothPiles [GOOD] >> THealthCheckTest::BridgeGroupDegradedInOnePile >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2026-01-10T12:05:41.317368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:41.317445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 2 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 
FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:42.897189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:05:42.914861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |84.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2026-01-10T12:04:01.990009Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:04:02.027021Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:04:02.027355Z node 1 :BS_NODE DEBUG: 
{NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:04:02.028334Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:04:02.028784Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:04:02.030045Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:04:02.030117Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:04:02.031187Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-10T12:04:02.031231Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:04:02.031367Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:04:02.031530Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:04:02.045576Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:04:02.045651Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:04:02.058208Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.058420Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.058635Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.058791Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.058940Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.059132Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.059340Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:02.059376Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:04:02.059496Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-10T12:04:02.059546Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-10T12:04:02.059612Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:04:02.059694Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:04:02.064836Z node 1 :BS_NODE DEBUG: 
{NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:04:02.133282Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:04:02.133376Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:02.135485Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:04:02.136141Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:02.136197Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:04:02.136853Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:02.144809Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:04:02.145536Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:04:02.145711Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-10T12:04:02.145876Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-10T12:04:02.145945Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T12:04:02.146013Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:04:02.146053Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:04:02.146112Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:04:02.164483Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-10T12:04:02.164565Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-10T12:04:02.164738Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2026-01-10T12:04:02.164815Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:04:02.165002Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:04:02.165218Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 
2026-01-10T12:04:02.165264Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-10T12:04:02.165331Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-10T12:04:02.202370Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-10T12:04:02.203801Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:04:02.203982Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:04:02.204199Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-10T12:04:02.204562Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-10T12:04:02.204631Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-10T12:04:02.204699Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-10T12:04:02.204746Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-10T12:04:02.216463Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:04:02.216812Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:04:02.217033Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T12:04:02.217223Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-10T12:04:02.217507Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-10T12:04:02.217709Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:04:02.218460Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:04:02.218695Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-10T12:04:02.218750Z node 1 :STATESTORAGE 
DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-10T12:04:02.218896Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-10T12:04:02.218931Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-10T12:04:02.218963Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-10T12:04:02.219050Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest: ... 1-10T12:05:40.343111Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046447617 Cookie: 2} 2026-01-10T12:05:40.343184Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046447617 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.343307Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046447617 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.343377Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046447617 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.343480Z node 44 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594046447617 leader: [0:0:0] followers: 0 2026-01-10T12:05:40.343657Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594046447617] forward result error, check reconnect [44:391:2238] 2026-01-10T12:05:40.343717Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594046447617] schedule retry [44:391:2238] 2026-01-10T12:05:40.402295Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [44:199:2162] 2026-01-10T12:05:40.402386Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [44:199:2162] 2026-01-10T12:05:40.402509Z node 44 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:05:40.402593Z node 44 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 44 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:05:40.402821Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:05:40.402966Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T12:05:40.403038Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T12:05:40.403083Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T12:05:40.403169Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo 
Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.403260Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.403319Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.403427Z node 44 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-10T12:05:40.403515Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [44:199:2162] 2026-01-10T12:05:40.403555Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [44:199:2162] 2026-01-10T12:05:40.428476Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594046316545] client retry [44:435:2278] 2026-01-10T12:05:40.428564Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594046316545] lookup [44:435:2278] 2026-01-10T12:05:40.428718Z node 44 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594046316545 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594046316545 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:05:40.428795Z node 44 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 44 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:05:40.428997Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594046316545 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:05:40.429163Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 0} 2026-01-10T12:05:40.429223Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 1} 2026-01-10T12:05:40.429263Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 2} 2026-01-10T12:05:40.429314Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.429431Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.429509Z node 44 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:05:40.429610Z node 44 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594046316545 leader: [0:0:0] followers: 0 2026-01-10T12:05:40.429685Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594046316545] forward result error, check reconnect [44:435:2278] 2026-01-10T12:05:40.429723Z node 44 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594046316545] schedule retry [44:435:2278] 
2026-01-10T12:05:40.467838Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72075186224037888] client retry [45:563:2214] 2026-01-10T12:05:40.467921Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [45:563:2214] 2026-01-10T12:05:40.468032Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [45:471:2156] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:05:40.468121Z node 45 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 45 selfDC 2 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:05:40.468312Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:05:40.468764Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:05:40.468836Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:05:40.468877Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:05:40.469109Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:575:2315] CurrentLeaderTablet: [44:577:2316] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T12:05:40.469225Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:575:2315] CurrentLeaderTablet: [44:577:2316] CurrentGeneration: 2 CurrentStep: 0} 2026-01-10T12:05:40.469353Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [44:575:2315] followers: 0 2026-01-10T12:05:40.469435Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [44:575:2315] 2026-01-10T12:05:40.469535Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 44 [45:563:2214] 2026-01-10T12:05:40.469672Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [45:563:2214] 2026-01-10T12:05:40.469719Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:563:2214] 2026-01-10T12:05:40.470036Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [45:563:2214] 2026-01-10T12:05:40.470482Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [45:563:2214] 2026-01-10T12:05:40.470527Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [45:563:2214] 2026-01-10T12:05:40.470563Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [45:563:2214] 
2026-01-10T12:05:40.470668Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:563:2214] 2026-01-10T12:05:40.470914Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [45:560:2214] EventType# 268959744 2026-01-10T12:05:40.471105Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-10T12:05:40.471170Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:05:40.471364Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.471506Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.471583Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{7, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-10T12:05:40.471635Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:05:40.471908Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-10T12:05:40.471956Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:05:40.472057Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:05:40.472536Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2026-01-10T12:05:41.795987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:41.796069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... Step5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:42.997849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:05:43.010346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:05:43.011645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2026-01-10T12:05:43.028839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:05:43.029101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2026-01-10T12:05:43.030319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 FAKE_COORDINATOR: Erasing txId 
104 TestModificationResult got TxId: 104, wait until txId: 104 TestModificationResults wait txId: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000042 FAKE_COORDINATOR: Erasing txId 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 104 TestWaitNotification wait txId: 105 2026-01-10T12:05:43.044378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:05:43.044504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 104 2026-01-10T12:05:43.051619Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:505:2473], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:05:43.052210Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:507:2475], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:43.052588Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:509:2477], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:43.054595Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:522:2484], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:05:43.056032Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:531:2487], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:05:43.057167Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:539:2495], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:05:43.057473Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:541:2497], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:43.057754Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:543:2499], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:05:43.058626Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:549:2505], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:05:43.058988Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:551:2507], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] |84.2%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessBadTablets >> TCacheTest::CheckSystemViewAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 13890, MsgBus: 19805 2026-01-10T12:04:57.102705Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702742923351570:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:57.102752Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:57.784184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:57.845858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:57.845974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:58.025227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:58.050410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:58.100158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:58.124035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:58.240205Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.019598s 2026-01-10T12:04:58.392971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:58.392994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:58.393004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:58.393076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:59.742849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:59.803113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:04:59.843801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:02.107544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702742923351570:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:02.107613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:03.853327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702768693156103:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:03.853535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:03.855500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702768693156113:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:03.855628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.327271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.492590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.541430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.656899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.777810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702772988123721:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.777903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.778255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702772988123726:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.778303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702772988123727:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.778405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:04.782802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:04.802271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702772988123730:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-10T12:05:04.979799Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702772988123781:2775] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22204, MsgBus: 31030 2026-01-10T12:05:06.773110Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702780071218350:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:06.773199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:07.067089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:07.079039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:07.079132Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:07.098523Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702780071218323:2081] 1768046706770016 != 1768046706770019 2026-01-10T12:05:07.164837Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:07.190099Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:07.392179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:07.411933Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:07.411954Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:07.411963Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:07.412041Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:07.812231Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:08.509568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:11.776526Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593702780071218350:2067];send_to=[0:7307199536658146 ... 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:05:27.910225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-01-10T12:05:27.961951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:05:28.108682Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 6773, MsgBus: 18724 2026-01-10T12:05:30.946049Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593702883246429843:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:30.946103Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:31.042482Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:31.576043Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:31.576161Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:31.580357Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593702883246429619:2081] 1768046730913448 != 1768046730913451 2026-01-10T12:05:31.625465Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:31.683865Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:31.684130Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:31.920064Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:31.920110Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:31.920118Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:31.920251Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:31.968356Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:33.327183Z 
node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:33.336384Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:33.366312Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:35.945387Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702883246429843:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:35.945459Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:37.536869Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702913311201701:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.536983Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.537472Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702913311201720:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.537515Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702913311201721:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.537549Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702913311201722:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.537582Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702913311201723:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.537700Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:37.542746Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:37.555433Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702913311201732:2585] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:05:37.556312Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702913311201733:2586] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:05:37.564620Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:37.564669Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:37.565915Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:37.567066Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702913311201730:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:05:37.567114Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702913311201728:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:05:37.567150Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702913311201729:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:05:37.629162Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702913311201786:2621] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:37.641189Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702913311201797:2628] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:37.663492Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702913311201812:2635] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:38.459984Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:05:39.190378Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:39.329613Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TCacheTest::SysLocks [GOOD] >> TCacheTest::MigrationUndo [GOOD] >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2026-01-10T12:05:42.470728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:42.470789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ate->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 
281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:44.594331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-10T12:05:44.600003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:05:44.603591Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2026-01-10T12:05:44.606814Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:430:2410], for# user1@builtin, access# DescribeSchema 2026-01-10T12:05:44.607516Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:436:2416], for# user1@builtin, access# |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2026-01-10T12:05:43.701631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:43.701700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at 
step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:44.872539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV2 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2026-01-10T12:05:42.796321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:42.796381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... rUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/compile_cache_queries', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.268677Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.268737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720668 2026-01-10T12:05:45.268806Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_read_bytes_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.269136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-10T12:05:45.269216Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.269420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-10T12:05:45.269473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-10T12:05:45.269525Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.269759Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.269947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected 
because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-10T12:05:45.270000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-10T12:05:45.270047Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-10T12:05:45.270206Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-10T12:05:45.270356Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-10T12:05:45.270649Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270684Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-10T12:05:45.270784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-10T12:05:45.270825Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-10T12:05:45.270887Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270922Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at 
schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.270949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-10T12:05:45.270985Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-10T12:05:45.271092Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-10T12:05:45.271190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-10T12:05:45.271228Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-10T12:05:45.271286Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-10T12:05:45.271371Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-10T12:05:45.271605Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-10T12:05:45.271674Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait 
txId: 106 2026-01-10T12:05:45.274817Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-10T12:05:45.275013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:601) 2026-01-10T12:05:45.283110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-10T12:05:45.283748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2026-01-10T12:05:45.294035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-10T12:05:45.294624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:05:45.297319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2026-01-10T12:05:45.308441Z node 2 :TX_DATASHARD ERROR: datashard.cpp:3603: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186233409548 Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:733:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:736:2067] recipient: [2:735:2628] Leader for TabletID 72057594046678944 is [2:737:2629] sender: [2:738:2067] recipient: [2:735:2628] 2026-01-10T12:05:45.380014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:45.380118Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithoutRealSystemViewPaths::SystemViews >> TCacheTest::List >> THealthCheckTest::TestNoSchemeShardResponse [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:02:49.581119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:02:49.755925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:02:49.764319Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:02:49.764785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:02:49.764923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:02:50.186424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:02:50.186557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:02:50.194163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046565693311 != 1768046565693314 2026-01-10T12:02:50.208859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:02:50.258208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:02:50.385973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:02:50.783617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:02:50.801417Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:02:50.921995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:02:50.922076Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:02:50.922186Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:02:51.077692Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:02:51.077815Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:02:51.078434Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:02:51.078522Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:02:51.078932Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:02:51.079056Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:02:51.079207Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:02:51.081274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:02:51.081753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:02:51.082485Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:02:51.082560Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:02:51.116659Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:02:51.117710Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:02:51.118012Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:02:51.118260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:02:51.176738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:02:51.177577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:02:51.177690Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:02:51.179566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:02:51.179649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:02:51.179757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:02:51.180245Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:02:51.180421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:02:51.180506Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:02:51.192737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:02:51.282217Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:02:51.282410Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:02:51.282510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:02:51.282552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:02:51.282587Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:02:51.282619Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:02:51.282850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:51.282909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:02:51.283218Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:02:51.283318Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:02:51.283411Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:02:51.283456Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:02:51.283497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:02:51.283544Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:02:51.283585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:02:51.283614Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:02:51.283654Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:02:51.283744Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:51.283784Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:02:51.283828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:02:51.283934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:02:51.284012Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:02:51.288161Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:02:51.288473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:02:51.288532Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:02:51.288658Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... efault, DatabaseId: /Root, IsStreamingQuery: 0}. ExecuteDatashardTransaction trace_id_verbosity# 0 trace_id# 2026-01-10T12:05:43.322210Z node 13 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 1 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:05:43.322261Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:146} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Datashard not finished yet actor_state# WaitResolveState shard_id# 72075186224037888 state# Executing trace_id# 2026-01-10T12:05:43.322308Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), trace_id# 2026-01-10T12:05:43.322355Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:05:43.322611Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [13:1185:2878], Recipient [13:1154:2961]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1185 RawX2: 55834577726 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010&8\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2026-01-10T12:05:43.322666Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:05:43.322795Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [13:1154:2961], Recipient [13:1154:2961]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:05:43.322825Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:05:43.322897Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:05:43.323074Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:05:43.323169Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2026-01-10T12:05:43.323225Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-10T12:05:43.323256Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2026-01-10T12:05:43.323281Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:05:43.323308Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:05:43.323343Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2026-01-10T12:05:43.323386Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715665] at 72075186224037888 2026-01-10T12:05:43.323416Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-10T12:05:43.323438Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:05:43.323458Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:05:43.323484Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BlockFailPoint 
2026-01-10T12:05:43.323507Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-10T12:05:43.323530Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:05:43.323550Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-10T12:05:43.323571Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-10T12:05:43.323653Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-10T12:05:43.323785Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-10T12:05:43.323880Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-10T12:05:43.323950Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-10T12:05:43.323977Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-10T12:05:43.323998Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:05:43.324022Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:43.324070Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:05:43.324201Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2026-01-10T12:05:43.324230Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:05:43.324257Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:05:43.324282Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:05:43.324348Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-10T12:05:43.324387Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:05:43.324414Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2026-01-10T12:05:43.324471Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:05:43.324500Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:43.324533Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:43.324696Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1413} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037888 status# COMPLETE error# trace_id# 2026-01-10T12:05:43.324852Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:05:43.324959Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [13:1185:2878] TxId: 281474976715665. Ctx: { TraceId: 01kekwrkjyaczyntwmz21bvnfy, Database: , SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:05:43.325120Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, ActorId: [13:1044:2878], ActorState: CleanupState, LegacyTraceId: 01kekwrkjyaczyntwmz21bvnfy, EndCleanup is_final# false trace_id# 2026-01-10T12:05:43.325314Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=13&id=ZmRhZjcyNDAtMTA1MGQyNWYtNzNiMmNmZDQtYjk3MGM4N2Q=, ActorId: [13:1044:2878], ActorState: CleanupState, LegacyTraceId: 01kekwrkjyaczyntwmz21bvnfy, Sent query response back to proxy proxy_request_id# 8 proxy_id# [13:66:2113] trace_id# 2026-01-10T12:05:43.609918Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [13:1194:2987], Recipient [13:1154:2961]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:43.610038Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:43.610160Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [13:1193:2986], serverId# [13:1194:2987], sessionId# [0:0:0] 2026-01-10T12:05:43.610416Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [13:829:2724], Recipient [13:1154:2961]: NKikimr::TEvDataShard::TEvGetOpenTxs |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TCacheTest::SystemViews >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] >> TCacheTestWithoutRealSystemViewPaths::SystemViews [GOOD] >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test 
command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: [1:245:2060] recipient: [1:227:2145] 2026-01-10T12:03:34.228577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:03:34.228724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:34.228774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:03:34.228817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:03:34.228878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:03:34.228914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:03:34.229007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:03:34.229161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:03:34.230166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:03:34.230500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:03:34.413063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:03:34.413152Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:34.485442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:03:34.486434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:03:34.486676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:03:34.497122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:03:34.497473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:03:34.498324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:03:34.499268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2026-01-10T12:03:34.503267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:34.503491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:03:34.504835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:03:34.504904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:03:34.505099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:03:34.505157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:03:34.505200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:03:34.505347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:03:34.779168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.780982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781542Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:03:34.781769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:42.358025Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:42.358121Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:42.358156Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:42.749797Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:42.749901Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:42.749990Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:42.750023Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.159276Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.159360Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.159434Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.159466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.571153Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.571255Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.571343Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.571374Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.996378Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.996464Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:43.996541Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:43.996573Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:44.433769Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:44.433863Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:44.433948Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:44.433983Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:44.856656Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:44.856744Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:44.856853Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:44.856900Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:45.310784Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:45.310886Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:05:45.311000Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:250:2160], Recipient [7:250:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:45.311041Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:05:45.417205Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1493:3243], Recipient [7:250:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:05:45.417291Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:05:45.417433Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:05:45.417694Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 222us result status StatusPathDoesNotExist 2026-01-10T12:05:45.417891Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:05:45.418441Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1494:3244], Recipient [7:250:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:05:45.418492Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:05:45.418591Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:05:45.418753Z node 7 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 166us result status StatusPathDoesNotExist 2026-01-10T12:05:45.418893Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:05:45.419371Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1495:3245], Recipient [7:250:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2026-01-10T12:05:45.419422Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:05:45.419519Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:05:45.419701Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 187us result status StatusPathDoesNotExist 2026-01-10T12:05:45.419838Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TCacheTest::SystemViews [GOOD] >> TCacheTest::TableSchemaVersion >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] >> TCacheTest::WatchRoot >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> 
test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] >> DataShardWrite::UpsertImmediate >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> TCacheTest::TableSchemaVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 10309, MsgBus: 6121 2026-01-10T12:05:02.957976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702766127090954:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:02.958026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:03.849124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:03.849234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:03.957239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:04.185091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:04.185510Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:04.189843Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.135649s 2026-01-10T12:05:04.189914Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.135747s 2026-01-10T12:05:04.224299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:04.454427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:04.520614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:04.520635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:04.520650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:04.520738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:05.648980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:05.661977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:05:05.681403Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-10T12:05:05.714218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-10T12:05:05.736946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31422, MsgBus: 14271 2026-01-10T12:05:14.121816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:14.121998Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:14.372155Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:14.420482Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702810633843032:2081] 1768046713944443 != 1768046713944446 2026-01-10T12:05:14.484475Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:14.506000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:14.506078Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:14.602370Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:14.892183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:14.928722Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:14.928755Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:14.928770Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:14.928849Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:15.077291Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:15.906050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:15.934589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-10T12:05:15.981345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-10T12:05:16.008391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:16.231533Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593702823518745850:2553] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:16.231907Z node 2 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 64907, MsgBus: 15752 2026-01-10T12:05:20.935938Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593702843643788970:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:20.936069Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:21.048185Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:21.292596Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593702843643788808:2081] 1768046720893826 != 1768046720893829 2026-01-10T12:05:21.320328Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:21.322162Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:21.322231Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:21.352218Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:21.363204Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:21.592964Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:21.592987Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:21.592995Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:21.593062Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:21.712292Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:21.964225Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:22.675942Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 720575940 ... 
72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:30.554419Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:30.576699Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:30.866835Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:30.866860Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:30.866868Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:30.866942Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:31.046339Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:31.240813Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:32.285232Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:32.296827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:32.325451Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:32.380917Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) Trying to start YDB, gRPC: 20879, MsgBus: 5374 2026-01-10T12:05:37.901651Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593702913661599515:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:37.912803Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:37.913017Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:38.122636Z node 5 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:38.123357Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:38.128349Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593702913661599345:2081] 1768046737828855 != 1768046737828858 2026-01-10T12:05:38.137051Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:38.137134Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:38.164403Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:38.305141Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:38.305164Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:38.305174Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:38.305260Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:38.307416Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:38.831491Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:39.208049Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:39.218747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:39.266765Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.430773Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702935136436814:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.430903Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.431699Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702935136436830:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.431772Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702935136436831:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.431813Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593702935136436832:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.432016Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:42.438527Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:42.442316Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593702935136436839:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:05:42.450402Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593702935136436837:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:05:42.450402Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593702935136436836:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:05:42.534100Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593702935136436885:2600] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:42.556716Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593702935136436903:2608] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:43.040825Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593702913661599515:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:43.054487Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:43.445147Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:05:44.199617Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:44.348963Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TCacheTest::MigrationCommit [GOOD] >> TCacheTest::CookiesArePreserved >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] Test command err: 2026-01-10T12:05:47.187968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:47.188037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-10T12:05:47.274285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2026-01-10T12:05:47.959797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:47.959867Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-10T12:05:48.012044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-10T12:05:48.018967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:05:48.027339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:05:48.028167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2026-01-10T12:05:48.033804Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:214:2199], for# user1@builtin, access# DescribeSchema 2026-01-10T12:05:48.034895Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:220:2205], for# user1@builtin, access# |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] >> DataShardWrite::AsyncIndexKeySizeConstraint |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |84.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TCacheTest::CookiesArePreserved [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> 
TCacheTest::TableSchemaVersion [GOOD] Test command err: 2026-01-10T12:05:47.582307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:47.582372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 
281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... _COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 
5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: 
Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:48.580421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-10T12:05:48.630986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:05:48.867750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken >> DataShardWrite::ReplaceImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] 
Test command err: 2026-01-10T12:05:47.141249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:47.141317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add 
transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 
State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... RDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at 
step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at 
step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:49.931153Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-10T12:05:49.942480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:05:49.950173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 30350, MsgBus: 61848 2026-01-10T12:05:39.793732Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702922339618426:2246];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:39.793786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:40.300187Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:40.364946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.365029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.515858Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702922339618214:2081] 1768046739696529 != 1768046739696532 2026-01-10T12:05:40.522674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:40.525083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:40.746072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:40.800650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:40.828045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:40.828067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:40.828091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:40.828202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:41.619241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:41.730207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.021658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.329994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.446132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:44.796193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702922339618426:2246];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:44.796272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:45.365973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702948109423873:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.366081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.366534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702948109423883:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.366597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.810674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.873836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.920898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.972254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.013689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.107160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.202583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.260218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.379209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702952404392058:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.379335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.380104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702952404392063:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.380162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702952404392064:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.380325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.390075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:46.413530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702952404392067:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:46.507483Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702952404392118:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:48.516493Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593702960994327040:2544], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:05:48.518934Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjJmMDNjNzktN2JhYTYzNDMtYTM1ZTkzNmMtNzFkN2Y4YWE=, ActorId: [1:7593702960994327031:2538], ActorState: ExecuteState, LegacyTraceId: 01kekwrrqxevaqs2pfqwd8hcak, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> DataShardWrite::UpsertWithDefaults >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct >> KqpBatchUpdate::Large_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 29696, MsgBus: 1808 2026-01-10T12:04:56.850667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702740294118180:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:56.850793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:57.500064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:57.559193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:57.559382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:57.788258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702740294118052:2081] 1768046696763412 != 1768046696763415 2026-01-10T12:04:57.800702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:57.816837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:57.857138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:57.897064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:58.224716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:58.224738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:04:58.224757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:58.224835Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:59.404808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:59.417767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:59.483970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:05:01.852202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702740294118180:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:01.852296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:02.150571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702766063922766:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.150682Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.152671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702766063922776:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.152746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.586046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.745538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.814235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.872434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.958284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702766063923083:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.958382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.960447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702766063923088:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.960518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702766063923089:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.960678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:02.969633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:02.991714Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702766063923092:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-10T12:05:03.162123Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702770358890439:2774] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18890, MsgBus: 62854 2026-01-10T12:05:05.477539Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702779641757379:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:05.483242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:05.560558Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.007273s 2026-01-10T12:05:05.578554Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:05.838146Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:05.838254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:05.840264Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:05.848032Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702779641757309:2081] 1768046705415768 != 1768046705415771 2026-01-10T12:05:05.864005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:05.867356Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:06.028778Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:06.028801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:06.028809Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:06.028895Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:06.358852Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:06.415378Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:06.976438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but p ... 
3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 29101, MsgBus: 15989 2026-01-10T12:05:40.276998Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593702927503392664:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:40.277061Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:40.306982Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:40.460496Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:40.462312Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.462478Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.479761Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:40.482498Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:40.716822Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:40.716872Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:40.716884Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:40.716980Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:41.281382Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:41.435052Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:41.450753Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:41.469541Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:41.610835Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:41.984843Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.155886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.284228Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702927503392664:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:45.284327Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:45.372261Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702948978230944:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.372422Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.376251Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702948978230954:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.376385Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.518669Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.579538Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.632534Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.675184Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.728861Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.830412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.908714Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.007318Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.137960Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702953273199129:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.138085Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.138857Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702953273199134:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.138949Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702953273199135:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.139331Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.145892Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:46.167173Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702953273199138:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:46.255175Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702953273199189:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> KqpRboPg::TestCrossInnerJoin-ColumnStore >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> KqpRboYql::ConstantFolding+ColumnStore >> THealthCheckTest::ServerlessBadTablets [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> KqpRboPg::Bench_Select >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets >> KqpRboPg::Filter >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> KqpRboPg::Select >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> THealthCheckTest::BridgeGroupDegradedInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile |84.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] >> KqpRboPg::Bench_Select [GOOD] >> KqpRboPg::Bench_Filter >> KqpRboPg::TestCrossInnerJoin-ColumnStore [GOOD] >> KqpRboPg::UnionAll >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> KqpRboPg::Select [GOOD] >> KqpRboPg::TestCrossInnerJoin+ColumnStore >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> KqpRboPg::Filter [GOOD] >> KqpRboPg::OrderBy >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::IncrementImmediate >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] Test command err: Trying to start YDB, gRPC: 13414, MsgBus: 22100 2026-01-10T12:04:58.384957Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702749316088990:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:58.438924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:58.494194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:59.445263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:59.530748Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:59.530868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:59.544777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:59.599938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:59.743895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:59.760219Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702749316088940:2081] 1768046698319185 != 1768046698319188 2026-01-10T12:04:59.832200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:00.024722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:00.024747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:00.024753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:00.024824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:01.082732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:01.096054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:05:01.249112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.631723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.957492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.128039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:03.356272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702749316088990:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:03.356361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:05.594599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702779380861911:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.594723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.595229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702779380861921:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.597947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.052181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.147010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.205684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.270343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.355120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.430655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.491267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.550157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.644769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702783675830092:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.644903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.645213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702783675830098:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.645251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702783675830097:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.645280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.650316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:06.672999Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702783675830101:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:05:06.734487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702783675830152:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... 3702969392551789:2081] 1768046750505624 != 1768046750505627 2026-01-10T12:05:51.026437Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:51.046464Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:51.200819Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:51.200848Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:51.200859Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:51.200956Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:51.410961Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:51.576264Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:51.934076Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:51.940973Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:51.952433Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:52.140255Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:52.426286Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:52.577186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:55.559899Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702969392551925:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:55.559983Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:56.534298Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702995162357436:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:56.534419Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:56.534934Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702995162357446:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:56.535004Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:56.712235Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:56.789797Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:56.878797Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:56.964438Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.020253Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.101843Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.166496Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.250831Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.373060Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702999457325624:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.373161Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.373240Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702999457325629:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.373591Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702999457325631:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.373627Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.378886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:57.407473Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702999457325632:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:57.509422Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702999457325684:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:00.403709Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:06:00.413239Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-01-10T12:06:00.517073Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-10T12:06:00.528654Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |84.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> KqpRboYql::Filter+ColumnStore >> KqpRboYql::ConstantFolding+ColumnStore [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped |84.3%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 7219, MsgBus: 26915 2026-01-10T12:04:04.493681Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702516800356563:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:04.494094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:04.872298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:04.890265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:04.890371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:04.912731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:05.063525Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:05.068230Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702516800356535:2081] 1768046644475491 != 1768046644475494 2026-01-10T12:04:05.172177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:05.220432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:05.220454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:05.220460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:05.220555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:05.518196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:05.700723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:05.724929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:05.812163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:05.969713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:06.195585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:06.301898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.428691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702533980227599:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.428984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.429462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702533980227609:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.429559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.880303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.920150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.962343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.008099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.067606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.111888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.169640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.254150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.365399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538275195783:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.365539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.367707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538275195788:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.367760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538275195789:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.368098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.371655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:09.390350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702538275195792:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:09.449795Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702538275195845:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:09.493036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702516800356563:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:09.493134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=col ... 0;fline=native.cpp:71;event=parsing;size=336;columns=1; Trying to start YDB, gRPC: 4274, MsgBus: 25776 2026-01-10T12:05:44.454118Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7593702946831876390:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:44.454181Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:44.537308Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:44.693065Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:44.693194Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:44.728003Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:44.752173Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:44.753461Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:44.913035Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:44.913070Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:44.913085Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:44.913226Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:45.337256Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:45.522220Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:45.934113Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:49.458676Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593702946831876390:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:49.458801Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:51.616880Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593702976896648336:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:51.616972Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593702976896648350:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:51.617056Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:51.620262Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593702976896648354:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:51.620382Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:51.625118Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:51.652578Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593702976896648353:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:05:51.709303Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593702976896648406:2552] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=344;columns=1; Trying to start YDB, gRPC: 11207, MsgBus: 24016 2026-01-10T12:05:53.398347Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7593702985727248004:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:53.398438Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:53.516192Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:53.574599Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:53.581209Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.581361Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.586018Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:53.755154Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:53.755179Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:53.755198Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:53.755331Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:53.775891Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:54.410490Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:54.830783Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:58.398517Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7593702985727248004:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:58.398633Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:01.866063Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703020086987224:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:01.866241Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:01.866749Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703020086987236:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:01.866822Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703020086987237:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:01.867057Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:01.874959Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:01.899146Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7593703020086987240:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:06:01.983805Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7593703020086987291:2548] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap >> KqpRboPg::Bench_Filter [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn >> KqpRboPg::UnionAll [GOOD] >> DataShardWrite::UpsertPrepared+Volatile [GOOD] |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn >> KqpRboPg::Bench_CrossFilter >> KqpRboYql::Aggregation+ColumnStore >> KqpRboPg::ConstantFolding >> KqpRboYql::Select >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpTypes::DyNumberCompare [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] >> DataShardWrite::CancelImmediate [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> THealthCheckTest::TestOnlyRequestNeededTablets >> KqpRboPg::OrderBy [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None >> KqpRboPg::LeftJoinToKqpOpJoin >> KqpTypes::MultipleCurrentUtcTimestamp >> DataShardWrite::DeletePrepared+Volatile >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpdateImmediate [GOOD] >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::StorageLimit50 [GOOD] >> KqpRboPg::Aggregation+ColumnStore >> DataShardWrite::IncrementImmediate [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles >> DataShardWrite::UpsertPreparedManyTables-Volatile >> THealthCheckTest::ShardsLimit800 >> THealthCheckTest::StorageNoQuota >> DataShardWrite::RejectOnChangeQueueOverflow >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> THealthCheckTest::ShardsNoLimit >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 [GOOD] >> KqpRboYql::BasicJoins >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 17312, MsgBus: 13593 2026-01-10T12:04:59.091719Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702752890918525:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:59.091992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:59.779763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:59.847351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:59.847487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:59.941312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:00.229648Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:00.322997Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702752890918306:2081] 1768046699077165 != 1768046699077168 2026-01-10T12:05:00.372276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:00.432189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:00.659942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:00.659969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:00.659981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:00.660114Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:01.737215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:01.753004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:05:01.851359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.130880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.392438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.542604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.096320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702752890918525:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:04.096407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:05.365595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702778660723968:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.365894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.372512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702778660723978:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.372619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:05.764154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:05.840380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:05.906009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:05.965646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.041069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.108711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.230308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.366482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:06.542441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702782955692148:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.542530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.542830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702782955692153:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.542866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702782955692154:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.543004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.546928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:06.587914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702782955692157:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:05:06.665120Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702782955692208:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:10.396611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... p:440: Table profiles were not loaded 2026-01-10T12:05:54.417455Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593702986757634212:2081] 1768046754222620 != 1768046754222623 2026-01-10T12:05:54.443140Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.528436Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.055575s 2026-01-10T12:05:54.589139Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:54.596763Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:54.596797Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:54.596807Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:54.596916Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:55.191754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:55.204980Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:55.216414Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:55.263340Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:55.310073Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:55.584036Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:55.679460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.236233Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702986757634379:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:59.236344Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:59.745117Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703008232472566:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.745219Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.745557Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703008232472575:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.745606Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.848700Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.908039Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.942950Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.998621Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.045796Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.105792Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.186520Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.335105Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.443327Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703012527440763:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:00.443427Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:00.443489Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703012527440768:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:00.443626Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703012527440770:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:00.443664Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:00.448053Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:00.472108Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703012527440772:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:06:00.563595Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703012527440831:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:03.665801Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:06:03.710947Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:06:03.785148Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> DataShardWrite::UpsertPreparedManyTables+Volatile >> KqpRboPg::ConstantFolding [GOOD] >> KqpRboPg::ExpressionSubquery >> KqpRboYql::Select [GOOD] >> KqpRboYql::TPCH_YDB_PERF ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 6399, MsgBus: 25365 2026-01-10T12:04:59.285871Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702752493275817:2224];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:59.285961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:59.871755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:59.949775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:59.949900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:00.116225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702752493275608:2081] 1768046699241124 != 1768046699241127 2026-01-10T12:05:00.126239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:05:00.134324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:00.141892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:00.296803Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:00.486372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:00.486403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:00.486409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:00.486496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:01.492660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:01.503470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:01.610806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:01.903805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.255095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:02.543354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:04.287401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702752493275817:2224];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:04.287466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:06.893170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702782558048578:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.893275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.893698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702782558048588:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:06.893755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:07.549809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.629768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.696419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.742808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.796300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.863020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:07.955949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:08.048817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:08.362030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702791147984075:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:08.362109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:08.362527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702791147984080:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:08.362567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702791147984081:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:08.362846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:08.367404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:08.388753Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702791147984084:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:08.460817Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702791147984137:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:11.389897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... _operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:50.176570Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:50.298643Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:53.576226Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593702961887542599:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:53.576319Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:53.688258Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702983362380896:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:53.688376Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:53.689070Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702983362380906:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:53.689153Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:53.812140Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:53.886815Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:53.941950Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.044037Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.101558Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.164799Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.241078Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.328537Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.441548Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702987657349073:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:54.441638Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:54.441679Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702987657349078:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:54.442086Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593702987657349080:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:54.442138Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:54.445699Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:54.466327Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593702987657349081:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:54.531525Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593702987657349133:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:57.512342Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.244814Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:05:58.850376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.479045Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:00.107186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:06:00.645317Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:06:01.339904Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:01.436748Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-10T12:06:03.774096Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: 
Cannot get console configs 2026-01-10T12:06:03.774125Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:06.565981Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710732:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 22842, MsgBus: 7544 2026-01-10T12:04:05.612972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702520031928046:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:05.613011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:05.667460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:06.170811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:06.176776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:06.176913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:06.222682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:06.364067Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:06.371309Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702520031927928:2081] 1768046645585257 != 1768046645585260 2026-01-10T12:04:06.419795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:06.484692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:06.484713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:06.484749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:06.484852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:06.629886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:07.039528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:07.109737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:07.261653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:07.463464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:07.560426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.764610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702537211798983:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.764754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.765124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702537211798993:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.765180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.134943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.180482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.261368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.320642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.404658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.455739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.507484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.551620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:10.613421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702520031928046:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:10.613484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:10.631290Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702541506767158:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.631403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.631714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702541506767163:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.631754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702541506767164:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.632057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:10.636722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:10.656510Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702541506767167:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:10.727914Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702541506767218:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7191, MsgBus: 3698 2026-01-10T12:04:13.781385Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline= ... 735:2081] 1768046751678202 != 1768046751678205 2026-01-10T12:05:52.011106Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:52.024803Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:52.031926Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:52.032061Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:52.089714Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:52.215793Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:52.215825Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:52.215839Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:52.215967Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:52.563105Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:52.799766Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:53.205146Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:53.214346Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:56.682152Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593702977417673762:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:56.682268Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:58.731539Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[12:7593703007482445710:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.731706Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.732303Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703007482445722:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.732371Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703007482445723:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.732852Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.740649Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:58.758589Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593703007482445726:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:05:58.828884Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593703007482445777:2547] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 24812, MsgBus: 14471 2026-01-10T12:06:00.774453Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7593703013671429336:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:00.774534Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:00.869379Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:01.055168Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:01.096189Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:01.101215Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:01.101353Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:01.118920Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:01.152879Z node 13 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.009117s 2026-01-10T12:06:01.279741Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:01.279772Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:01.279789Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:01.279908Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:01.437334Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:01.820901Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:02.357833Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:02.378643Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:05.776637Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7593703013671429336:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:05.776755Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:09.089442Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703052326135832:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.089527Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703052326135866:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.089643Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.090801Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703052326135871:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.090899Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.095105Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:09.137273Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7593703052326135870:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:06:09.203970Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7593703052326135923:2545] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> KqpRboPg::Bench_CrossFilter [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> KqpBatchDelete::Large_1 [GOOD] >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow >> KqpRboPg::Bench_JoinFilter >> KqpRboPg::LeftJoinToKqpOpJoin [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> KqpRboYql::Filter+ColumnStore [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> THealthCheckTest::BridgeTimeDifference >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] >> DataShardWrite::PreparedDistributedWritePageFault >> KqpRboPg::FiveJoinsCBO >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> KqpRboYql::Filter-ColumnStore |84.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |84.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |84.3%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 19465, MsgBus: 21584 2026-01-10T12:05:39.809775Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702924491814437:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:39.809821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:39.867358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:40.419723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.419823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.422733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:40.429762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:40.499192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:40.668151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:40.776665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:40.776690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:40.776696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:40.776779Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:40.811636Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:41.544705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:41.610293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:41.840625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.028175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:42.121574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:44.809866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702924491814437:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:44.809942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:45.189653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702950261619829:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.189829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.190224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702950261619839:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.190262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:45.565756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.602709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.638523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.678896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.724764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.794880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.857929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:45.930197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:46.070280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702954556588008:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.070386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.070704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702954556588014:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.070753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.070808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702954556588013:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:46.080367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:46.097894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702954556588017:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:46.177879Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702954556588070:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:05:48.225653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesha ... ng to start YDB, gRPC: 23894, MsgBus: 20759 2026-01-10T12:06:04.332015Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703030279002560:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:04.332071Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:04.373633Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:04.523373Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:04.537888Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:04.537993Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:04.542746Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:04.618488Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:04.634229Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:04.634272Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:04.634294Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:04.634423Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:05.337976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:05.394266Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:05.404915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:05.429755Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:05.566085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:05.778998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:05.872548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.196363Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703051753840882:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.196513Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.200432Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703051753840892:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.200576Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.286798Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.332799Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593703030279002560:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:09.332868Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:09.367219Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.430170Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.481454Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.528613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.585982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.640434Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.724481Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.859487Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703051753841769:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.859631Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.860095Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703051753841774:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.860096Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703051753841775:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.860147Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:09.868601Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:09.901300Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703051753841778:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:06:09.979567Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703051753841829:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:12.804578Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> KqpRboPg::ExpressionSubquery [GOOD] >> KqpRboPg::FallbackToYqlEnabled |84.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped |84.3%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] >> KqpRboPg::TestCrossInnerJoin+ColumnStore [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> THealthCheckTest::StorageNoQuota [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] >> KqpRboPg::PredicatePushdownLeftJoin >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TTxDataShardRecomputeKMeansScan::BadRequest >> THealthCheckTest::TestBootingTabletIsNotDead >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-10T12:06:18.048380Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.048553Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.048588Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.049109Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:06:18.049175Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.049207Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.050396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005577s 2026-01-10T12:06:18.058521Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.059171Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:18.059325Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.064059Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.064087Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.064101Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.074792Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:06:18.074848Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.074882Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.074950Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007739s 2026-01-10T12:06:18.075404Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:18.077395Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:18.077482Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.079720Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.079745Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.079766Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.088364Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:06:18.088413Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.088449Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.088550Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.290101s 2026-01-10T12:06:18.089244Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.089707Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:18.089797Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.090816Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.090834Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.090852Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.091215Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:06:18.091258Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.091283Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.091351Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.169090s 2026-01-10T12:06:18.097501Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.098161Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:18.098299Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.099624Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.099651Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.099674Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.099985Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.114485Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:18.134870Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.135198Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-10T12:06:18.135234Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.135254Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.135313Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.179746s 2026-01-10T12:06:18.135536Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:06:18.136048Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.141361Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.141395Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.141415Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.141797Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.142259Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:18.142486Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.143041Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:18.244174Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.244453Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:06:18.244516Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:18.244558Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:06:18.244621Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:06:18.345050Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:06:18.345235Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:06:18.346489Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.346589Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.346613Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:18.347031Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:18.351196Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:18.351411Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.351936Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:18.457272Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:18.457522Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:06:18.457574Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-10T12:06:18.457612Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:06:18.457690Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-10T12:06:18.457798Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:06:18.458073Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:06:18.458145Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:06:18.458254Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2026-01-10T12:05:41.175623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:41.175698Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 
281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
ted because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-10T12:05:43.524933Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.525067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-10T12:05:43.525114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-10T12:05:43.525150Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.525378Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.525570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-10T12:05:43.525640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-10T12:05:43.525685Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.525777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-10T12:05:43.525821Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.525933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-10T12:05:43.525975Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-10T12:05:43.526333Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526422Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at 
schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-10T12:05:43.526608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-10T12:05:43.526662Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-10T12:05:43.526767Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526847Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-10T12:05:43.526941Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.526981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-10T12:05:43.527020Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.527062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-10T12:05:43.527116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-10T12:05:43.527151Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.527192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-10T12:05:43.527261Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', 
reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.527335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-10T12:05:43.527378Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.527619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-10T12:05:43.527664Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-10T12:05:43.527729Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:688:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2026-01-10T12:05:43.529176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:05:43.529264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:05:43.529641Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-10T12:05:43.529797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:05:43.561909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-10T12:05:43.562623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-10T12:05:43.629522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 2 
BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:774:2668] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:774:2668] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:779:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:779:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:781:2672] sender: [2:783:2067] recipient: [2:774:2668] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2026-01-10T12:05:46.287650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:05:46.287756Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:46.348451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:05:46.348530Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> KqpRboPg::FiveJoinsCBO [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 9933, MsgBus: 8843 ... waiting for SysViewsRoster update finished 2026-01-10T12:03:48.464795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:03:48.593734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:03:48.615510Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:03:48.616061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:03:48.616215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:03:49.159567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:49.159737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:49.177366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046625244587 != 1768046625244590 2026-01-10T12:03:49.191957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:49.281208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:49.430870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:03:49.911649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:03:49.911717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:03:49.911752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:03:49.915753Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:03:49.930850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:50.439946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:03:50.586774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:50.837770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.261161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:51.633043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.649339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.649704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.650683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.650976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:52.685270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:52.890238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.189624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.475918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:53.759373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.154088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.502607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:54.914416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:03:55.700615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:55.700743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:55.701141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:55.701208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:55.701268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:03:55.711478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:03:55.877228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2805:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:03:55.939218Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2865:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:03:57.583062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... mbda
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 8628, MsgBus: 9308 2026-01-10T12:06:07.939197Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703043231107528:2132];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:07.939246Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:08.046085Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:08.208452Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703043231107428:2081] 1768046767930592 != 1768046767930595 2026-01-10T12:06:08.258191Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:08.264852Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:08.264962Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:08.304283Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:08.470616Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:08.515817Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:08.515840Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:08.515848Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:08.515935Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:09.009857Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:09.557415Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:09.565182Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:06:09.579041Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:09.745382Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:10.218819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:10.346457Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:12.940217Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593703043231107528:2132];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:12.940314Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:14.007043Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703069000913105:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.007186Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.007790Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703073295880411:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.007923Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.150161Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.219333Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.270795Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.332526Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.394720Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.479905Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.575080Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.759668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:14.923577Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703073295881286:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.923721Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.924675Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703073295881291:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.924780Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703073295881292:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.926615Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:14.933322Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:14.959627Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703073295881295:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:06:15.058765Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703077590848646:3781] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpRboPg::Bench_JoinFilter [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-10T12:06:21.890304Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.890334Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.890359Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:21.898570Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:21.899576Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:21.899647Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.900856Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.900882Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.900902Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:21.908734Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:21.911275Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:06:21.911344Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.917877Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.917904Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.917928Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:21.923349Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:06:21.923417Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.923457Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.927670Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-10T12:06:21.930732Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.930758Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.930777Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:21.948028Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:06:21.948100Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.948133Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:21.948209Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-10T12:06:21.951223Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:06:21.951258Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:06:21.951282Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:21.964678Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:21.972393Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:22.004906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:06:22.005996Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:22.006367Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-10T12:06:22.010141Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-10T12:06:22.010680Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:22.010729Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:06:22.010760Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:06:22.010782Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-10T12:06:22.010812Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-10T12:06:22.010834Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-10T12:06:22.010852Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-10T12:06:22.010871Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-10T12:06:22.010952Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-10T12:06:22.010970Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-10T12:06:22.010988Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-10T12:06:22.011026Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-10T12:06:22.011049Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-10T12:06:22.011068Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-10T12:06:22.011086Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-10T12:06:22.011103Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-10T12:06:22.011166Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-10T12:06:22.011187Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-10T12:06:22.011203Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-10T12:06:22.011221Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-10T12:06:22.011237Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-10T12:06:22.011257Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-10T12:06:22.011275Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-10T12:06:22.011293Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-10T12:06:22.011321Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-10T12:06:22.011346Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-10T12:06:22.011363Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-10T12:06:22.011391Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-10T12:06:22.011415Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-10T12:06:22.011433Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-10T12:06:22.011456Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-10T12:06:22.011473Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-10T12:06:22.011546Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-10T12:06:22.011575Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-10T12:06:22.011595Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-10T12:06:22.011612Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-10T12:06:22.011628Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-10T12:06:22.011648Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-10T12:06:22.011677Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-10T12:06:22.011697Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-10T12:06:22.011714Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-10T12:06:22.011733Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-10T12:06:22.011749Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-10T12:06:22.011773Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-10T12:06:22.011795Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-10T12:06:22.011813Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-10T12:06:22.011829Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-10T12:06:22.011846Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-10T12:06:22.011862Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-10T12:06:22.011890Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-10T12:06:22.011960Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-10T12:06:22.014447Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-10T12:06:22.015502Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-10T12:06:22.015547Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-10T12:06:22.015570Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-10T12:06:22.015591Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-10T12:06:22.015618Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-10T12:06:22.015636Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-10T12:06:22.015652Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-10T12:06:22.015682Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-10T12:06:22.015719Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-10T12:06:22.015736Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-10T12:06:22.015757Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-10T12:06:22.015775Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-10T12:06:22.015792Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-10T12:06:22.015812Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-10T12:06:22.015840Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-10T12:06:22.015860Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-10T12:06:22.015900Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-10T12:06:22.015918Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-10T12:06:22.015956Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-10T12:06:22.015979Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-10T12:06:22.015997Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-10T12:06:22.016014Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-10T12:06:22.016031Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-10T12:06:22.016049Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-10T12:06:22.016106Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-10T12:06:22.016135Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-10T12:06:22.016153Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-10T12:06:22.016187Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-10T12:06:22.016208Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-10T12:06:22.016227Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-10T12:06:22.016255Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-10T12:06:22.016277Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-10T12:06:22.016355Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-10T12:06:22.016375Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-10T12:06:22.016391Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-10T12:06:22.016407Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-10T12:06:22.016423Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-10T12:06:22.016440Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-10T12:06:22.016459Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-10T12:06:22.016474Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-10T12:06:22.016498Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-10T12:06:22.016530Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-10T12:06:22.016546Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-10T12:06:22.016564Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-10T12:06:22.016579Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-10T12:06:22.016598Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-10T12:06:22.016626Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-10T12:06:22.016647Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-10T12:06:22.016663Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-10T12:06:22.016679Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-10T12:06:22.016732Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-10T12:06:22.016868Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:06:22.020866Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.020895Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.020918Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:22.028422Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:22.028880Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:22.029129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.032162Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:22.133251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.133510Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:06:22.133577Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:22.133633Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:06:22.133694Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:06:22.336626Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-10T12:06:22.441792Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:06:22.441930Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:06:22.442109Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:06:22.443324Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.443348Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.443367Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:22.444744Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:22.445597Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:22.445811Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.446352Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:22.553273Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:22.553540Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:06:22.553609Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:22.553655Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:06:22.553733Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-10T12:06:22.553853Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:06:22.558639Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:06:22.558746Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-10T12:06:22.559382Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |84.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> KqpRboYql::Filter-ColumnStore [GOOD] >> KqpRboYql::ExpressionSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] Test command err: 2026-01-10T12:05:05.492697Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702776481679110:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:05.501126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:05.851624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:05.854444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:05.854534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:05.942036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:06.040306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702776481678959:2081] 1768046705362206 != 1768046705362209 2026-01-10T12:05:06.060918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:06.104906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:06.304349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:06.304371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:06.304390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:06.304478Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:06.476251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:06.604448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:10.253660Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593702798625872277:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:10.253825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:10.323072Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:10.572188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:10.582324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:10.582422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:10.637758Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593702798625872062:2081] 1768046710183010 != 1768046710183013 2026-01-10T12:05:10.638673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:10.664428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:10.847477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:10.847509Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:10.847517Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:10.847640Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:10.857672Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:11.175120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:11.242427Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:25.383848Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:25.384050Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:25.395708Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:25.396984Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:25.399755Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:25.400379Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:25.400660Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:25.402674Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:25.402840Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:25.402988Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:26.040809Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:26.175801Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:26.175947Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:26.176679Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:26.176757Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:26.251676Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:26.252698Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:26.253049Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:26.343253Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:26.358570Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:27.558976Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:27.559046Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:27.559092Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:27.559835Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:39.236458Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.237614Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.241097Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:39.259730Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:39.261323Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:39.262014Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:39.262102Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_ ... METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:04.474076Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:04.474620Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:04.474674Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:05.131375Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:05.386685Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:05.386889Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:05.387856Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:05.387977Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:05.444383Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-10T12:06:05.445359Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:05.445918Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:05.525444Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:05.566130Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:07.047818Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:07.047902Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:07.047961Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:07.048561Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status: ORANGE message: "Group dead in some piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" type: "STORAGE_GROUP" level: 4 } issue_log { id: "ORANGE-cf29-1231c6b1-f7549920" status: ORANGE message: "Pool has no redundancy" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "ORANGE-a3e2-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "ORANGE-5cc9-1231c6b1" status: ORANGE message: "Database has storage issues" location { database { name: "/Root" } } reason: "ORANGE-af30-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2026-01-10T12:06:17.614308Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:17.614847Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:17.629526Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:17.630785Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:17.633347Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:497:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:17.633883Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:17.634187Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:17.636331Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:490:2167], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:17.636632Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:17.636776Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:18.255279Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:18.475181Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:18.476823Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:18.478456Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:18.478585Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:18.539822Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-10T12:06:18.540766Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:18.541465Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:18.621365Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:18.664871Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:19.957804Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:19.957889Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:19.957941Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:19.958519Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-6100-1231c6b1-2147483648" status: RED message: "Group dead in all piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" reason: "RED-1a83-1231c6b1-2147483650" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-40f1-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-6100-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483650" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483650" pile { name: "2" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483650-3-58-0-58" id: "2147483650-3-59-0-59" id: "2147483650-3-60-0-60" } pile { name: "2" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-5cc9-1231c6b1" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" type: "DATABASE" level: 1 } location { id: 11 host: "::1" port: 12001 pile { name: "pile0" } } |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |84.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> THealthCheckTest::ShardsNoLimit [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |84.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FiveJoinsCBO [GOOD] Test command err: Trying to start YDB, gRPC: 7891, MsgBus: 7270 2026-01-10T12:05:54.785813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702988686746331:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:54.800376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:55.346161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:55.488290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:55.488384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:55.539405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:55.601221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:55.618134Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702988686746109:2081] 1768046754722634 != 1768046754722637 2026-01-10T12:05:55.632065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:55.788449Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:55.895970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:55.895996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:55.896004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:55.896130Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:56.704850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:56.716962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:59.401688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593703010161583482:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.401820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.402142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703010161583492:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.402195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.726815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:59.785378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702988686746331:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:59.785453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:59.921531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703010161583596:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.921618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.922080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703010161583601:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.922115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703010161583602:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.922130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.926846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:59.944258Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703010161583605:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:06:00.073547Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703014456550952:2605] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10193, MsgBus: 18875 2026-01-10T12:06:01.855019Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703017538059589:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:01.855463Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:01.894565Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:02.100172Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:02.133733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:02.133814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:02.136175Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703017538059422:2081] 1768046761813630 != 1768046761813633 2026-01-10T12:06:02.192890Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:02.204804Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:02.320807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:02.367431Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:02.367464Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:02.367472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:02.367567Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:02.803635Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:03.114233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
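Editor's note: the warning chain repeated above — `Failed to fetch pool info, NOT_FOUND` for pool `default`, a `Scheduled retry for error: Transaction ... completed, doublechecking`, and finally a TX_PROXY `path exist, request accepts it` message — looks like the usual first-query bootstrap of `.metadata/workload_manager/pools/default` racing across sessions rather than a test failure; the same seems to hold for the `cannot detect path existence ... LookupError` lines under `//Root/.metadata`. When triaging a run like this it can help to subtract that known noise first. Below is a minimal, hypothetical Python sketch for doing so; the pattern list is inferred from the literal strings in this log and the input file name is an assumption, not part of this run's tooling.

```python
import re
import sys
from collections import Counter

# Patterns copied from warnings seen in this log; they are assumed (not
# verified against YDB documentation) to be benign bootstrap/retry noise.
BENIGN_PATTERNS = {
    "default pool not found": re.compile(
        r"Resource pool default not found or you don't have access permissions"),
    "pool create doublecheck": re.compile(
        r"Transaction \d+ completed, doublechecking"),
    "pool already exists": re.compile(
        r"error: path exist, request accepts it"),
    "metadata path lookup": re.compile(
        r"cannot detect path existence;path=//Root/\.metadata"),
}

def summarize(path: str) -> Counter:
    """Count benign-looking warnings per pattern in a saved ya make log."""
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as log:
        for line in log:
            for name, pattern in BENIGN_PATTERNS.items():
                if pattern.search(line):
                    counts[name] += 1
    return counts

if __name__ == "__main__":
    # Usage: python triage_log.py ya_log.txt   (file name is illustrative)
    for name, count in summarize(sys.argv[1]).most_common():
        print(f"{count:6d}  {name}")
```

Whatever remains after subtracting these counts (for example genuine ERROR lines with no later success, or the datashard `Cancelled read` warning further down) is usually a better place to start reading than the resource-pool churn.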
2026-01-10T12:06:03.137013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:06.609116Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703039012896787:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:06.609219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:06.609797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703039012896797:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:06.609903Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_worklo ... rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:13.831961Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703069892357033:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.832064Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.833197Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703069892357038:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.833264Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703069892357039:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.833388Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.838989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:13.857694Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703069892357043:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:06:13.947965Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703069892357096:2764] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27196, MsgBus: 20307 2026-01-10T12:06:15.942973Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:15.948778Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:16.082429Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:16.082514Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:16.090462Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:16.102215Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703076923502359:2081] 1768046775833054 != 1768046775833057 2026-01-10T12:06:16.102647Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:16.242009Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:16.360760Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:16.360787Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:16.360795Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:16.360885Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:16.932598Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:17.176633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:17.188889Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:06:20.847158Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703098398339720:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:20.847282Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:20.847786Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703098398339730:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:20.847850Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:20.906302Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:20.992271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:21.034817Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:21.094127Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:21.151412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:21.214491Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:21.332571Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703102693307478:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:21.332698Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:21.333329Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703102693307483:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:21.333381Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703102693307484:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:21.333427Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:21.343278Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:21.370509Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703102693307487:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2026-01-10T12:06:21.444961Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703102693307539:2830] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:21.797443Z node 4 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[4:7593703102693307620:2399], 0} |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |84.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 17371, MsgBus: 20062 2026-01-10T12:05:53.861970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702983593590990:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:53.862522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:54.176223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:54.176413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.176528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.301357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.305552Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702983593590870:2081] 1768046753833191 != 1768046753833194 2026-01-10T12:05:54.331112Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:54.371423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:54.472638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:54.472705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-01-10T12:05:54.472719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:54.472880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:54.864981Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:55.014249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:57.478404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703000773460940:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.478552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.478757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703000773460948:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.482812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703000773460954:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.482990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.487505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:57.503782Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703000773460955:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:05:57.620944Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703000773461007:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13727, MsgBus: 27620 2026-01-10T12:05:59.292627Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703009402567088:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:59.293356Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:59.322488Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:59.440292Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:59.470097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:59.470185Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:59.496780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:59.574680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:59.574704Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:59.574710Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:59.574782Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:59.597536Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:00.166461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:00.301056Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:03.738894Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703026582437116:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:03.739006Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:03.744463Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703026582437126:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:03.744552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.060817Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:04.264246Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703030877404519:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.264365Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.267812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703030877404524:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.267883Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703030877404525:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.267987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.276290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:04.292265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703009402567088:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:04.292324Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=access ... pool default not found or you don't have access permissions } 2026-01-10T12:06:11.932348Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.991558Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:12.062736Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:12.223499Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703064274606078:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:12.223594Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:12.224064Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703064274606083:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:12.224123Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703064274606084:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:12.224418Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:12.229173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:12.249265Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:06:12.251667Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703064274606087:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:06:12.327473Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703064274606138:2648] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3528, MsgBus: 64603 2026-01-10T12:06:14.797766Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703076290869557:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:14.801207Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:14.850993Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:14.959373Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:14.961169Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703076290869536:2081] 1768046774791094 != 1768046774791097 2026-01-10T12:06:14.982836Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:14.982926Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:15.020284Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:15.045721Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:15.112734Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:15.112777Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:15.112789Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:15.112880Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:15.669127Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:15.679494Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:06:15.813544Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:19.527931Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703097765706899:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.528051Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.528484Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703097765706909:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.528558Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.585674Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:19.735073Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:19.799652Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593703076290869557:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:19.802663Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:19.941917Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703097765707080:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.942042Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.942647Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703097765707086:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.942658Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703097765707085:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.942725Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.968202Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:19.994069Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703097765707089:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-10T12:06:20.066550Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703102060674438:2643] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> THealthCheckTest::ShardsLimit800 [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2026-01-10T12:05:48.617420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:05:48.617478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 
281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 
FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-10T12:05:50.151325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:04:59.940392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:00.112995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:00.127119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:00.127356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:00.127521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:01.081141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:01.245490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:01.245703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:01.333443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:01.406565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:03.342465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:03.342524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:03.342555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:03.343048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:10.495010Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:10.502167Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:10.505184Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:347:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:10.505579Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:10.505731Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:11.283959Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:11.504126Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:11.504299Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:11.550037Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:11.636166Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:13.558700Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:13.558767Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:13.558821Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:13.559530Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:21.551114Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:21.562882Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:21.565448Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:444:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:21.572576Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:21.572801Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:22.085374Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:22.192168Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:22.192317Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:22.231524Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:22.303522Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:24.067756Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:24.067829Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:24.067872Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:24.077074Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:39.055810Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.060182Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.098905Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:39.101179Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:39.103811Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:692:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:39.104309Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:39.104516Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:39.107106Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:687:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:39.107445Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:39.107565Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:39.724995Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:39.942999Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:39.943175Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:39.943727Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:39.943806Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.013931Z node 8 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2026-01-10T12:05:40.016865Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:40.017515Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:40.134919Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:40.200031Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:41.373388Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:41.3734 ... 
error=incorrect path status: LookupError; 2026-01-10T12:05:52.576828Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:52.577044Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:53.089494Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:53.264574Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.264705Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.265174Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.265252Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.341256Z node 10 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2026-01-10T12:05:53.342175Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:53.342576Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:53.472281Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:53.522550Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:54.695592Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:54.695641Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:54.695666Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:54.696004Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:05.334280Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:05.335610Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:05.338509Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:06:05.349952Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:05.351762Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:05.353172Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:305:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:05.353606Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:05.353774Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:05.357754Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:05.358039Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:05.899209Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:06.029078Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:06.029216Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:06.029779Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:06.029858Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:06.113590Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-10T12:06:06.114428Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:06.114936Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:06.251227Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:06.279934Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:07.486537Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:07.486593Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:07.486621Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:07.487054Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:06:19.567366Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:19.567677Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:19.603988Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:19.613888Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:19.622412Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:682:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:19.622824Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:19.622987Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:19.626617Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:693:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:19.627033Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:19.627219Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:20.225637Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:20.400758Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:20.400883Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:20.401923Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:20.405380Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:20.461016Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-10T12:06:20.462025Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:20.462407Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:20.544704Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:20.562182Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:21.688008Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:21.688062Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:21.688109Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:21.688495Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> KqpRboYql::OlapPredicatePushdown |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy |84.4%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} >> PersQueueSdkReadSessionTest::SettingsValidation |84.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRboPg::FallbackToYqlEnabled [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> KqpRboPg::FallbackToYqlDisabled >> THealthCheckTest::NoStoragePools >> THealthCheckTest::TestOnlyRequestNeededTablets [GOOD] >> THealthCheckTest::TestNodeDisconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2026-01-10T11:58:58.967244Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701201571101654:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:58.967709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:59.196557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:59.262793Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.015230s 2026-01-10T11:58:59.492654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:59.684582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:59.684698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T11:58:59.732175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:59.946267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:59.947248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:59.960023Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-10T11:58:59.960431Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-10T11:58:59.960507Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-10T11:58:59.960638Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-10T11:58:59.960777Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-10T11:58:59.961748Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-10T11:58:59.961772Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-10T11:58:59.961805Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-10T11:58:59.961840Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-10T11:58:59.971170Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-10T11:58:59.971193Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:58:59.971392Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:58:59.976582Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:58:59.976657Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.009497Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.009607Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.009619Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.009653Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.020393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T11:59:00.020410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T11:59:00.020414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T11:59:00.020478Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:59:00.040072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:59:00.252904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:59:00.261380Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.261460Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.261468Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.261494Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.290567Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# MakeDirectoryRequest, traceId# 01kekwca619t0bcaj799ww3jtz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35578, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T11:59:00.294935Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.295012Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.295020Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.295047Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.298586Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.298633Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.298640Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.298667Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.309331Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# CreateVolumeRequest, traceId# 01kekwca6ec9tzqs25v0agzc7t, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35578, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T11:59:00.311751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSolomonVolume, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp:380) 2026-01-10T11:59:00.314533Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.314602Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.314609Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T11:59:00.314643Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T11:59:00.330258Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2026-01-10T11:59:00.331133Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01kekwca7b7zve9q40na0wmjzj, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# 
ipv6:[::1]:35578, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef Wait iteration# 0 2026-01-10T11:59:00.332564Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2026-01-10T11:59:00.332930Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2026-01-10T11:59:00.332952Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:68: KeyValue# 72075186224037888 TTxInit flat BuildScheme 2026-01-10T11:59:00.333248Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2026-01-10T11:59:00.333855Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T11:59:00.333886Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T11:59:00.333929Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T11:59:00.333951Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-10T11:59:00.333960Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-10T11:59:00.334471Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ExecuteTransactionRequest, traceId# 01kekwca7efj0cssexercnfsbj, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35578, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T11:59:00.334754Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T11:59:00.334771Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-10T11:59:00.334796Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-10T11:59:00.334810Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2026-01-10T11:59:00.334818Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-10T11:59:00.334826Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2026-01-10T11:59:00.334834Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2026-01-10T11: ... 
-10T12:06:14.709746Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:06:14.711803Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:06:14.711865Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:06:14.819134Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2026-01-10T12:06:14.819188Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037888 TTxInit flat ReadDb Tree 2026-01-10T12:06:14.819723Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2026-01-10T12:06:14.821718Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037889 TTxInit flat Execute 2026-01-10T12:06:14.821739Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037889 TTxInit flat ReadDb Tree 2026-01-10T12:06:14.821872Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037889 TTxInit flat Execute returns true 2026-01-10T12:06:14.840308Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.840437Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.841188Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.841249Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.841279Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.841298Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.841321Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.841342Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.841360Z node 65 :KEYVALUE DEBUG: 
keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.841382Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.843336Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.843409Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.843449Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.843477Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.843503Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.843528Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.855052Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.855111Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.855138Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.855173Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.855326Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-10T12:06:14.855359Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 4 Marker# KV50 2026-01-10T12:06:14.855379Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 
72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-10T12:06:14.855390Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 3 Marker# KV50 2026-01-10T12:06:14.855408Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-10T12:06:14.855418Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2026-01-10T12:06:14.855429Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-10T12:06:14.855443Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2026-01-10T12:06:14.855484Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2026-01-10T12:06:14.855568Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Execute 2026-01-10T12:06:14.857135Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-10T12:06:14.857173Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-10T12:06:14.857200Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-10T12:06:14.857230Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-10T12:06:14.857313Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-10T12:06:14.857338Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 4 Marker# KV50 2026-01-10T12:06:14.857354Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-10T12:06:14.857363Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 3 Marker# KV50 2026-01-10T12:06:14.857374Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-10T12:06:14.857382Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 2 Marker# KV50 2026-01-10T12:06:14.857397Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-10T12:06:14.857406Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 1 Marker# KV50 2026-01-10T12:06:14.857433Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037889 SendCutHistory Marker# KV51 2026-01-10T12:06:14.857540Z node 65 :KEYVALUE DEBUG: 
keyvalue_flat_impl.h:324: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Execute 2026-01-10T12:06:14.879634Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Complete 2026-01-10T12:06:14.879759Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Complete 2026-01-10T12:06:15.052863Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01kekwsjrcbrpfvs087gkb1ram, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59130, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T12:06:15.079570Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-10T12:06:15.080475Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-10T12:06:15.085315Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037889 not found 2026-01-10T12:06:15.085440Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037888 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:06.064600Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:06.065031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:06.403663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:06.413850Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:06.442089Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:06.442537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:06.442807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:06.444876Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:06.445241Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:06.445452Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:06.954220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:07.095169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:07.095311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:07.096597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:07.096715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:07.151206Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:07.152543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:07.153196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:07.187252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:07.281971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:08.701692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:08.701773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:08.701814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:08.702293Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:24.369582Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:24.370625Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:24.372892Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:24.385555Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:24.397228Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:24.399207Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:298:2224], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:24.399678Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:24.399879Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:24.402084Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:24.402180Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:24.837430Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:24.924901Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:24.925039Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:24.926196Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:24.926314Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:24.974953Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:24.975905Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:24.976542Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:25.063834Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:25.098618Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:26.485312Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:26.485393Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:26.485438Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:26.485932Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:40.487387Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:40.487653Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:40.517456Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:40.519217Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:40.521883Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:506:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:40.522350Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:40.522648Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:40.525867Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:40.526205Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:40.526864Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:41.274333Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:41.416681Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:41.416829Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:41.417351Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconne ... for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:49.682092Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:49.682233Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:50.375434Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:50.503109Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:50.503302Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:50.541530Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:50.618824Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:51.913664Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:51.913759Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:51.913813Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:51.914465Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:58.109893Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:58.117884Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:58.122688Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:58.123167Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:58.123358Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:58.552781Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:58.552954Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:58.606460Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1768046753507318 != 1768046753507322 2026-01-10T12:05:58.630356Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:58.689681Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:58.776901Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:59.550627Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:59.550689Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:59.550719Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:59.555432Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:05.777794Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:05.785852Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:05.798573Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:05.799129Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:05.799287Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:06.242328Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:06.416435Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:06.416651Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:06.463281Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:06.560232Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:08.084575Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:08.084654Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:08.084700Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:08.085420Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:20.160243Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:20.160701Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:20.205177Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:20.206763Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:20.213531Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:499:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:20.214173Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:20.214269Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:20.225349Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:492:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:20.226075Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:20.226341Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:20.858558Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:20.989339Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:20.989492Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:20.990028Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:20.990098Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:21.039983Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-10T12:06:21.044408Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:21.044929Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:21.151058Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:21.178480Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:22.816460Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:22.816558Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:22.816622Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:22.817319Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |84.4%| [TM] {BAZEL_UPLOAD} ydb/services/keyvalue/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |84.4%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |84.5%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> KqpRboPg::PredicatePushdownLeftJoin [GOOD] >> KqpRboPg::TPCH |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TCdcStreamTests::VirtualTimestamps >> TCdcStreamTests::Basic >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TTxDataShardRecomputeKMeansScan::BadRequest [GOOD] >> 
TTxDataShardRecomputeKMeansScan::MainTable >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> DataShardWrite::UncommittedUpdateLockMissingRow [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-10T12:06:27.002359Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.002388Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.002410Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:27.024427Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:27.036726Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:27.066727Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.068293Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:27.133747Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:06:27.134238Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:06:27.134444Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-10T12:06:27.134543Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:27.134694Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:27.134731Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-10T12:06:27.134767Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:06:27.134795Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:06:27.136209Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.136233Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.136259Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:27.148362Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:27.151392Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:27.151621Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.151963Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-10T12:06:27.152964Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:06:27.153155Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:06:27.153476Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:06:27.153710Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:06:27.153826Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:27.153888Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:06:27.153924Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:06:27.154076Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-10T12:06:27.154190Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:06:27.154221Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:06:27.154240Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:06:27.154354Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-10T12:06:27.154423Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-10T12:06:27.154452Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-10T12:06:27.154471Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:06:27.154549Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-10T12:06:27.154593Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-10T12:06:27.154612Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-10T12:06:27.154648Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:06:27.154736Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-10T12:06:27.156224Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.156252Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.156273Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:27.156573Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:27.173090Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:27.173304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.173669Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2026-01-10T12:06:27.174692Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:06:27.174952Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:06:27.176046Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:06:27.176313Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:06:27.176930Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-10T12:06:27.176982Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:06:27.177087Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2026-01-10T12:06:27.177176Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:06:27.177197Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:06:27.177254Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2026-01-10T12:06:27.177309Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:06:27.177327Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:06:27.177398Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2026-01-10T12:06:27.177482Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:06:27.177504Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:06:30.911840Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2026-01-10T12:06:31.037902Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:06:31.038378Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:06:31.038436Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:31.038880Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:31.040282Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:31.040515Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:06:31.044859Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-10T12:06:31.175746Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-10T12:06:31.176824Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:31.178807Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:06:31.188467Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:06:31.189318Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-10T12:06:31.194475Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-10T12:06:31.195408Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-10T12:06:31.196502Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-10T12:06:31.197511Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-10T12:06:31.207051Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-01-10T12:06:31.207952Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-10T12:06:31.208028Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-10T12:06:31.208501Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:06:31.212424Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2026-01-10T12:06:31.224553Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.224585Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.224620Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:31.232410Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:31.251675Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:31.251869Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.252067Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:31.252664Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-10T12:06:31.260788Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.260821Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.260846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:31.270547Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:31.280310Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:31.280502Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.281081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:31.284239Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:31.288193Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:31.288273Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:06:31.294721Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 |84.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange >> THealthCheckTest::LayoutCorrect [GOOD] >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> KqpRboPg::FallbackToYqlDisabled [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::DropMultipleStreams >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:32.330221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:32.330323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:32.330362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:32.330397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:32.330434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:32.330465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:32.330565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:06:32.330696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:32.331629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:32.331994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:32.433942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:32.434042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:32.451622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:32.451771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:32.452017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:32.462032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:32.462325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:32.463113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:32.463424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:32.466314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.466552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:32.468022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:32.468114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.468194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:32.468264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:32.468328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:32.468657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:32.658373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.659565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.659744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.659825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.659909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.659973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.660785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2026-01-10T12:06:34.424764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2026-01-10T12:06:34.424788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2026-01-10T12:06:34.424816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-10T12:06:34.424844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-10T12:06:34.424867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-10T12:06:34.424888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-10T12:06:34.424908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-10T12:06:34.424937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-10T12:06:34.424967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-10T12:06:34.424988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-10T12:06:34.425009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-10T12:06:34.425052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-10T12:06:34.425080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-10T12:06:34.425101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-10T12:06:34.425128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-10T12:06:34.425148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-10T12:06:34.425185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-10T12:06:34.425208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-10T12:06:34.425240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason 
for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-10T12:06:34.425285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-10T12:06:34.425308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-10T12:06:34.425327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-10T12:06:34.425348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:06:34.425368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:06:34.425393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:06:34.425416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:06:34.425440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:06:34.425463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:06:34.425486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:06:34.425524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:06:34.425548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:06:34.425568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:06:34.425785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.425887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.425992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:06:34.426046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:06:34.426074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:06:34.426093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:06:34.426123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:06:34.426264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.426341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.426592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-10T12:06:34.426794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:06:34.427217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.427327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.427760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.427859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.428371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.428557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.428630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.428736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.428913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.429008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.429497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.429790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.429876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.429930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.430087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.430149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.430233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.437498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:34.443438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:34.443528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:34.444127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:34.444209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:34.444265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:34.445205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FallbackToYqlDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 32279, MsgBus: 65087 2026-01-10T12:06:07.080857Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703045436603978:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:07.080907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:07.571269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:07.577858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:07.577984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:07.651682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:07.670360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:07.831670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:07.968777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:07.968799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:07.968820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:07.968904Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:08.115011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:08.789041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:11.174376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703062616473997:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.174537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.175068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703062616474007:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.175123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.482922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:11.666175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703062616474108:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.666301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.666747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703062616474114:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.666826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.666825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703062616474113:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.671641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:11.687384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703062616474117:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:06:11.805344Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703062616474168:2604] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:12.084194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703045436603978:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:12.084270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12418, MsgBus: 11269 2026-01-10T12:06:12.993512Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703065418844898:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:12.996541Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:13.135357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:13.141256Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:13.169298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:13.169380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:13.178622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:13.304068Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:13.320710Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:13.320732Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:13.320739Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:13.320816Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:13.749831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:13.754245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-10T12:06:13.976453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:16.825562Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703082598714834:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:16.825643Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:16.826438Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703082598714844:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:16.826493Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:16.871423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:16.942100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: ... 648:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:22.820749Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:22.828067Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:22.844712Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703110413485652:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:06:22.924163Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703110413485705:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:23.115111Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7593703110413485714:2347], database: /Root, text: "\n --!syntax_pg\n SET TablePathPrefix = \"/Root/\";\n select sum(distinct t1.c), sum(distinct t1.a) from t1 group by t1.b order by t1.b;\n " 2026-01-10T12:06:23.819314Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593703093233615716:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:23.822105Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:26.669634Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7593703127593354973:2375], database: /Root, text: "\n INSERT INTO `/Root/t1` (a, b, c) VALUES (1, 2, 3);\n " Trying to start YDB, gRPC: 23244, MsgBus: 5362 2026-01-10T12:06:27.984713Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703131237008755:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:28.004618Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:28.008387Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.125353Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:28.158245Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:28.158342Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:28.182436Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.291459Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.403135Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:28.403168Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:28.403180Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:28.403280Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-01-10T12:06:28.992204Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:29.002847Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:29.011605Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:32.741499Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703152711845883:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.741602Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.742032Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703152711845893:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.742080Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.799974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:32.988221Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593703131237008755:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.988346Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:33.026505Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703157006813288:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.026636Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.026968Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703157006813293:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.027007Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703157006813294:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.027116Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.031750Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:33.056664Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703157006813297:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:06:33.156419Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703157006813348:2599] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:33.263650Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593703157006813357:2348], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:4:1: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported, code: 1 2026-01-10T12:06:33.264547Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=Y2ZiMmMyNmQtYmE3YTg3NjMtM2RkYzFmMDYtYzkzNjY2YTE=, ActorId: [4:7593703152711845986:2336], ActorState: ExecuteState, LegacyTraceId: 01kekwt4a17ayptaqxy0rktb6q, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 4 column: 1 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported" end_position { row: 4 column: 1 } issue_code: 1 } } tx_id# trace_id# 2026-01-10T12:06:33.414680Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593703157006813383:2358], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed, code: 1 2026-01-10T12:06:33.416440Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=MTEyNjFhNWUtZTcyM2NkODktNjNlZjQ2NzQtMjVjYTZmM2M=, ActorId: [4:7593703157006813375:2353], ActorState: ExecuteState, LegacyTraceId: 01kekwt4kcectayw7zz69xn1qc, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed" issue_code: 1 } } tx_id# trace_id# |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:05.975000Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:05.975368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:06.195265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:06.202139Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:06.237214Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:06.237661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:06.237911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:06.251736Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:06.252173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:06.252396Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:07.190381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:07.376721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:07.376850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:07.377655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:07.377749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:07.446890Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:07.447859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:07.448356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:07.596351Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:07.611096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:09.734041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:09.734114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:09.734166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:09.734700Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:23.599623Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:23.604913Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:23.607717Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:23.636153Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:23.639281Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:23.641101Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:298:2224], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:23.641525Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:23.641688Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:23.643737Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:23.643860Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:24.619634Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:24.765035Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:24.765200Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:24.766068Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:24.766158Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:24.824504Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:24.825363Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:24.825828Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:24.941205Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:24.955052Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:26.478438Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:26.478504Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:26.478546Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:26.478896Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:39.869055Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.870071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:39.872956Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:39.888441Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:39.889711Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:39.890442Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:39.890524Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:39.894207Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:39.894316Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:40.446648Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:40.630092Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.630236Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.630737Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:40.630837Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:40.706097Z node 5 :HIVE WARN: ... WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:54.014241Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:54.015198Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:681:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:54.015546Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:54.015770Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:54.018716Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:676:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:54.019090Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:54.019306Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:54.562086Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:54.682383Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.682531Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.683112Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.683200Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.734206Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-01-10T12:05:54.735266Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.735659Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.807460Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:54.822851Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:56.165247Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:56.165312Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:56.165352Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:56.165918Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:02.891594Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:02.898616Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:02.901268Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:444:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:02.901727Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:02.901912Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:03.453772Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:03.570419Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:03.570618Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:03.620796Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:03.691233Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:04.874260Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:04.874339Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:04.874387Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:04.880656Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:04.960785Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:06:19.597136Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:19.603684Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:19.614944Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:19.615267Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:19.615415Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:20.211785Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:20.342741Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:20.342914Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:20.402706Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:20.528518Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:21.813030Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:21.813112Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:21.813174Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:21.813635Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:30.228055Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:30.234452Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:30.239986Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:30.240351Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:30.240511Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:30.765787Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:30.889945Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:30.890090Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:30.931066Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:31.035861Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:32.570317Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:32.570374Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:32.570404Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:32.570995Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::SchemaChanges >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TCdcStreamTests::DropMultipleStreams [GOOD] >> TCdcStreamTests::Attributes >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> THealthCheckTest::TestStateStorageYellow >> THealthCheckTest::NoStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:33.691241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:33.691337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:33.691375Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:33.691409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:33.691445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:33.691485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:33.691554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:33.691643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:33.692578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:33.692842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:33.788438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:33.788504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:33.805827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:33.805949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:33.806135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:33.814670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:33.814924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:33.815669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:33.815915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:33.818395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:33.818616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:33.819820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:33.819887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:33.819965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:33.820026Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:33.820109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:33.820371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:34.013206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.014987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.015078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.015149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.015224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.015329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:34.015410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 110 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:35.972055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 110:0, at schemeshard: 72057594046678944 2026-01-10T12:06:35.976593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 110:1, propose status:StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", at schemeshard: 72057594046678944 2026-01-10T12:06:35.979595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 110, response: Status: StatusInvalidParameter Reason: "Last patrition 2 doesn\'t have the highest bound \"AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\"" TxId: 110 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:35.979946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 110, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2026-01-10T12:06:35.980371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2026-01-10T12:06:35.980418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2026-01-10T12:06:35.980933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-01-10T12:06:35.981057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-10T12:06:35.981094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:1028:2946] TestWaitNotification: OK eventTxId 110 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } TestModificationResults wait txId: 112 2026-01-10T12:06:35.989538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:35.989843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 112:0, at schemeshard: 72057594046678944 2026-01-10T12:06:35.990067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 112:1, propose status:StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, at schemeshard: 72057594046678944 2026-01-10T12:06:35.997106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 112, response: Status: StatusInvalidParameter Reason: "Only 1 root partitions has new bounds, required: 3" TxId: 112 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:35.997416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 112, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-10T12:06:35.997906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-10T12:06:35.997958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-10T12:06:35.998409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-10T12:06:35.998579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:06:35.998622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1035:2953] TestWaitNotification: OK eventTxId 112 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } TestModificationResults wait txId: 114 2026-01-10T12:06:36.007233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:36.007518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 114:0, at schemeshard: 72057594046678944 2026-01-10T12:06:36.007669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: 
KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-10T12:06:36.017186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:36.017729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 114, wait until txId: 114 TestWaitNotification wait txId: 114 2026-01-10T12:06:36.018175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 114: send EvNotifyTxCompletion 2026-01-10T12:06:36.018220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 114 2026-01-10T12:06:36.018754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2026-01-10T12:06:36.018891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-10T12:06:36.018942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:1042:2960] TestWaitNotification: OK eventTxId 114 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } TestModificationResults wait txId: 116 2026-01-10T12:06:36.030223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } } } TxId: 116 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:36.030635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 116:0, at schemeshard: 72057594046678944 2026-01-10T12:06:36.030917Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 116:1, propose status:StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", at schemeshard: 72057594046678944 2026-01-10T12:06:36.035370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 116, response: Status: StatusInvalidParameter Reason: "Partitions 0 and 0 have overlapped bounds at point \"-inf\"" TxId: 116 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:36.035773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 116, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 116, wait until txId: 116 TestWaitNotification wait txId: 116 2026-01-10T12:06:36.036282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 116: send EvNotifyTxCompletion 2026-01-10T12:06:36.036355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 116 2026-01-10T12:06:36.036850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-10T12:06:36.036965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-10T12:06:36.037009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:1049:2967] TestWaitNotification: OK eventTxId 116 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> TCdcStreamTests::SchemaChanges [GOOD] >> TCdcStreamTests::RetentionPeriod >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:04.746031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:04.746389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:04.925540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:04.926627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:04.934743Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:04.935171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:04.935452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:04.948158Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:04.948750Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:04.948947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:05.762680Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:05.946082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:05.946211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:05.947012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:05.947104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:06.013844Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:06.014776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:06.015313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:06.123869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:06.158627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:07.842404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:07.842472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:07.842507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:07.842933Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:22.265094Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:22.265370Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:22.276564Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:22.278046Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:22.280118Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:22.280654Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:22.280925Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:22.282924Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:22.283118Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:22.283249Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:23.073529Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:23.314282Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:23.314416Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:23.315081Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:23.315161Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:23.395658Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:23.396624Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:23.396938Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:23.493463Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:23.520479Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:25.456898Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:25.456987Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:25.457036Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:25.457562Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:40.710358Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:40.711346Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:40.714379Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:40.731897Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:40.733182Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:40.733820Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:40.733906Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:40.738680Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:40.738807Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:41.199675Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:41.393849Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:41.393996Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:41.395002Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:41.395090Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting ... 
106Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:06.806459Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:06.806634Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:07.354148Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:07.578782Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:07.578953Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:07.582257Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:07.582375Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:07.632380Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-10T12:06:07.636520Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:07.636986Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:07.707427Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:07.738007Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:09.109656Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:09.109717Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:09.109877Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:09.110212Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/d9xi/0033d4/r3tmp/tmprIzqkY/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/d9xi/0033d4/r3tmp/tmprIzqkY/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/d9xi/0033d4/r3tmp/tmprIzqkY/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-10T12:06:22.125031Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:22.125712Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:22.139693Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:22.141102Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:22.143304Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:497:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:22.143767Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:22.143991Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:22.145923Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:490:2167], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:22.146279Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:22.146430Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:22.905018Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:23.191436Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:23.191608Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:23.200175Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:23.200305Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:23.252362Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-10T12:06:23.253097Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:23.253656Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:23.336608Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:23.352706Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:25.084141Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:25.084222Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:25.084291Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:25.085018Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:33.753258Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:33.764070Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:33.809571Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:33.810190Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:33.810505Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:34.754246Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:34.938838Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:34.939055Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:34.980488Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:35.060166Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:36.843934Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:36.844211Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:36.844304Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:36.845420Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TTxDataShardUploadRows::RetryUploadRowsToShard >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> KqpBatchUpdate::Large_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:35.548982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:35.549084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:35.549141Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:35.549178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:35.549213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:35.549262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:35.549321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:35.549404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:35.550296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:35.550621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:35.870855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:35.870933Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:35.917658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:35.917772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:35.917973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:35.936772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:35.937063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:35.937750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:35.937997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:35.940940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:35.941116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:35.942369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:35.942429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:35.942492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:35.942534Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:35.942592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:35.942861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:36.181194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.182915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:36.183497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... lPathId: 27] was 0 2026-01-10T12:06:41.393472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:06:41.393502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:06:41.393525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:06:41.393548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:06:41.393588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:06:41.393612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:06:41.393635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:06:41.393660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:06:41.393680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:06:41.393705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:06:41.393861Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.393988Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.394096Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:06:41.394144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:06:41.394178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:06:41.394200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:06:41.394220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId 
[OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:06:41.394323Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.394392Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.394611Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-10T12:06:41.394843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:06:41.428400Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.428555Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429014Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429101Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429612Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429778Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.429962Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.430068Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.430596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.430894Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.430990Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.431058Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.431212Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.431270Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:06:41.431323Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 
72057594046678944 2026-01-10T12:06:41.458017Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:41.484962Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:41.485052Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:41.485854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:41.485980Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:41.486042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:41.493434Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1144:3048] sender: [2:1204:2058] recipient: [2:15:2062] 2026-01-10T12:06:41.605295Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:06:41.605625Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 393us result status StatusSuccess 2026-01-10T12:06:41.606287Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } 
Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> KqpRboYql::OlapPredicatePushdown [GOOD] >> KqpRboYql::OrderBy |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:38.163914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:38.164004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
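For reference against the Topic1 DescribeScheme output a few lines above: the partition KeyRange bounds are printed as C-style octal escapes over 16-byte keys. The sketch below only decodes the two escaped strings copied from that output into hex and an approximate position within a uniform 16-byte key space; it makes no claim about how SchemeShard chooses the boundaries.

# Bounds copied from the DescribeScheme result above:
#   partition 0 ToBound = "*\252\252...\252"  (1 ASCII byte + 15 octal escapes)
#   partition 1 ToBound = "\325UUU...UT"      (1 octal escape + 14 'U' + 'T')
BOUNDS = {
    "partition 0 ToBound / partition 1 FromBound": "*" + r"\252" * 15,
    "partition 1 ToBound / partition 2 FromBound": r"\325" + "U" * 14 + "T",
}

def unescape(s: str) -> bytes:
    """Interpret \\NNN octal escapes; all other characters are plain ASCII."""
    out, i = bytearray(), 0
    while i < len(s):
        if s[i] == "\\" and i + 4 <= len(s) and s[i + 1:i + 4].isdigit():
            out.append(int(s[i + 1:i + 4], 8))
            i += 4
        else:
            out.append(ord(s[i]))
            i += 1
    return bytes(out)

for name, escaped in BOUNDS.items():
    raw = unescape(escaped)
    frac = int.from_bytes(raw, "big") / 2 ** (8 * len(raw))
    print(f"{name}: {raw.hex()} (~{frac:.4f} of the key space)")

Decoded, the bounds land at roughly 1/6 (0x2aaa...aa) and 5/6 (0xd555...54) of the key space, so in this test partition 1 covers the middle two thirds of the range while partitions 0 and 2 take about one sixth each.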
2026-01-10T12:06:38.175280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:38.175406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:38.175483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:38.175519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:38.175635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:38.175727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:38.176582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:38.176887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:38.552193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:38.552281Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:38.610529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:38.644494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:38.644760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:38.681273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:38.681481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:38.682190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:38.682705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:38.705242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:38.705483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:38.706751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:38.706822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:38.706951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-10T12:06:38.707009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:38.707054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:38.707309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:39.174654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.175850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.175998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:39.176971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_class' 2026-01-10T12:06:42.260986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2026-01-10T12:06:42.261039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/partition_stats' 2026-01-10T12:06:42.261077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-10T12:06:42.261146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-10T12:06:42.261203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-10T12:06:42.261303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_request_units_one_hour' 2026-01-10T12:06:42.261395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-10T12:06:42.261446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-10T12:06:42.261495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_by_tli_one_hour' 2026-01-10T12:06:42.261551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-10T12:06:42.265432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-10T12:06:42.265576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_hour' 2026-01-10T12:06:42.265662Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-10T12:06:42.265754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_duration_one_minute' 2026-01-10T12:06:42.265819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-10T12:06:42.265906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-10T12:06:42.266016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-10T12:06:42.266100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_tables' 2026-01-10T12:06:42.266148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-10T12:06:42.266209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-10T12:06:42.266259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_users' 2026-01-10T12:06:42.266314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-10T12:06:42.271224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-10T12:06:42.271345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_group_members' 2026-01-10T12:06:42.271396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-10T12:06:42.271444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/streaming_queries' 2026-01-10T12:06:42.271490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-10T12:06:42.271551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 
2026-01-10T12:06:42.271658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-10T12:06:42.271739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/resource_pools' 2026-01-10T12:06:42.271776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-01-10T12:06:42.271871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_permissions' 2026-01-10T12:06:42.271909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-10T12:06:42.271957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-10T12:06:42.272023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_minute' 2026-01-10T12:06:42.272054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:863:2817] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-10T12:06:42.278244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-10T12:06:42.278510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2026-01-10T12:06:42.278824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2026-01-10T12:06:42.281958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:06:42.282224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2026-01-10T12:06:42.285941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-10T12:06:42.286185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2026-01-10T12:06:42.286503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2026-01-10T12:06:42.289391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:06:42.289658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:40.321263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:40.321379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:40.321418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:40.321488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:40.321528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:40.321568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:40.321677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:40.321757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:40.322685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:40.323021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:40.426194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:40.426267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:40.443078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:40.443175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:40.443365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:40.452871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:40.453131Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:40.453877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:40.454157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:40.456706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:40.456939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:40.458252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:40.458317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:40.458398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:40.458457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:40.458499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:40.458828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:40.608820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.609869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610822Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.610999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:40.611078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T12:06:42.835168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:06:42.835240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-10T12:06:42.835361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:06:42.835483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1071:3001], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:06:42.841314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:42.841383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:06:42.841599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:42.841639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:06:42.842170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:06:42.842282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-10T12:06:42.842329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-10T12:06:42.843267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:06:42.843367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:06:42.843400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:06:42.843448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:06:42.843502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-10T12:06:42.843602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:06:42.858921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:06:42.859002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:06:42.859150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:06:42.859215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:42.859278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:06:42.859325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:42.859371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:06:42.859417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:42.859472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:06:42.859508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:06:42.859720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:06:42.860710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:06:42.862845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:06:42.862898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:06:42.863428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:06:42.863554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:06:42.863597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1276:3157] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-10T12:06:42.867639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:42.867844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2026-01-10T12:06:42.867899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2026-01-10T12:06:42.868092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-10T12:06:42.868145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-10T12:06:42.871030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:42.871302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2026-01-10T12:06:42.875245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:42.875440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2026-01-10T12:06:42.875486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2026-01-10T12:06:42.875650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: 
can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-10T12:06:42.875698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-10T12:06:42.878551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:42.878831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 23972, MsgBus: 20528 2026-01-10T12:05:52.357291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702979605249500:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:52.357398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:52.404668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:52.822776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:52.822883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:52.860781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:52.901518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:52.929790Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:52.976255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702979605249279:2081] 1768046752294202 != 1768046752294205 2026-01-10T12:05:53.174167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:53.309883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:53.316563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:53.316595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:53.320445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:53.349236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:54.121785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:54.148459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:54.225271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.548744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.744852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.879449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.361529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702979605249500:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:57.362558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703001080087634:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.362638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:57.362874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.374013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703001080087644:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.374138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.973411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.025760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.095274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.155202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.219817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.315878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.383483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.488316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:58.666599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703005375055819:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.666732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.667814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703005375055824:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.667855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703005375055825:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.667898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.672220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:58.687966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703005375055828:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:05:58.773214Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703005375055879:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... 128924629036:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:27.430234Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:27.598179Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:27.672680Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:27.676393Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703128924628859:2081] 1768046787369783 != 1768046787369786 2026-01-10T12:06:27.703470Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:27.714305Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:27.741991Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:27.955638Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:27.969752Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:27.969776Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:27.969784Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:27.969865Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:28.470313Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.720202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:28.732596Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:28.752065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:28.861025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:29.167384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:29.290886Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:32.429084Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593703128924629036:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.429150Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:33.381677Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703154694434507:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.381789Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.382278Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703154694434517:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.382345Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.519156Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:33.578157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:33.648673Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:33.730565Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:33.812251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:33.906068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:34.000162Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:34.147918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:34.367647Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703158989402701:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:34.367787Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:34.368380Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703158989402706:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:34.368444Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703158989402707:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:34.368490Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:34.373491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:34.418269Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703158989402710:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:06:34.472748Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703158989402763:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:37.683723Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:16.040405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:16.040656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:16.378330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:16.385210Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:16.409272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:16.409845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:16.409959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:16.412030Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:16.420614Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:16.420686Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:17.472349Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:17.680161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:17.680306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:17.681097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:17.681177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:17.735498Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:17.736530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:17.736882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:17.882161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:17.897504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:19.547536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:19.547619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:19.547659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:19.554936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-7932-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-1" reason: "YELLOW-7932-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-edf5-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-10T12:05:29.410540Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:29.410839Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:29.424475Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:29.425825Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:29.428178Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:29.428659Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:29.428959Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:29.431087Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:29.431257Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:29.431440Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:29.938721Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:30.103200Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:30.103386Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:30.103941Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:30.104044Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:30.176611Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:30.177659Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:30.178116Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:30.286082Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:30.317367Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:31.631851Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:31.631932Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:31.631992Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:31.632885Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:42.094769Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:42.095953Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:42.098840Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:42.114426Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:42.115551Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h: ... node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:05.902710Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:06.965042Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:06.965098Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:06.965147Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:06.965721Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:17.967237Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:17.967784Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:17.983711Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:17.985167Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:17.987610Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:497:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:17.988264Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:17.988530Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:17.991080Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:490:2167], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:17.991468Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:17.991653Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:18.559086Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:18.726208Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:18.726336Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:18.727491Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:18.729376Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:18.796974Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-10T12:06:18.797611Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:18.798088Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:18.907235Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:18.955939Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:19.943373Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:19.943426Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:19.943451Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:19.943853Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:33.301773Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:33.303118Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:33.331676Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:761:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:33.332675Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:33.332765Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:33.335461Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:773:2353], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:33.335803Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:33.335888Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:33.977504Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:34.198300Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:34.198472Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:34.199799Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:34.199908Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:34.264931Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-10T12:06:34.266146Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:34.266670Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:34.400099Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:34.443977Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:40.990718Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:41.002223Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:41.002306Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:41.002356Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:41.003936Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:41.032279Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:41.032511Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:41.084107Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-10T12:06:41.084898Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" reason: "YELLOW-7932-1231c6b1-15" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 13 host: "::1" port: 12001 } |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |84.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |84.5%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TTxDataShardUploadRows::TestUploadRows >> 
TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] >> TSchemeShardServerLess::StorageBillingLabels >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] Test command err: 2026-01-10T12:05:02.864843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702763847751580:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:02.883530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:02.912938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:03.489223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:03.489343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:03.496300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:03.639125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:03.720623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:03.722098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702763847751535:2081] 1768046702810480 != 1768046702810483 2026-01-10T12:05:03.908476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:03.969426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:04.018493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:04.018518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:04.018530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:04.018643Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:04.679564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-10T12:05:16.227293Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:16.228522Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:16.246076Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:16.262320Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:16.270295Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:16.271092Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:16.271419Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:16.281930Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:690:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:16.282045Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:16.282087Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:16.862942Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:17.084473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:17.084633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:17.085553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:17.085637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:17.140217Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:05:17.141134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:17.141576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:17.224357Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:17.251448Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:18.636186Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:18.636268Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:18.636306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:18.636756Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2026-01-10T12:05:34.395688Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:34.397410Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:34.398592Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:34.432778Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:34.435010Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:34.437260Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:34.437694Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:34.437962Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:34.440302Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:34.440511Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:35.060240Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:35.292372Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:35.292527Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:35.293601Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:35.293696Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:35.345442Z node 4 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:05:35.346265Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:35.346713Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:35.455441Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:35.473185Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:36.828424Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:3 ... 
ORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:54.512581Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.512771Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.567752Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.665168Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:56.250145Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:56.250236Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:56.250291Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:56.251074Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:56.329517Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-10T12:06:10.835454Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:10.849125Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:10.852760Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:10.853193Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:10.853367Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:11.449353Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:11.621082Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:11.621279Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:11.675179Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:11.793759Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:13.301862Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:13.308247Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:13.308324Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:13.309205Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:39.298866Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:39.304424Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:39.317445Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:39.318674Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:39.491991Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:06:39.494882Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:06:39.503132Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:39.503246Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:39.503306Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-10T12:06:39.906012Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1243:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:39.906722Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:39.912409Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:39.928087Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:1245:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:39.928544Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [16:556:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:39.929434Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:39.929558Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:39.929895Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:534:2143], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:39.930082Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:39.930183Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:39.930811Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:39.930898Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:40.646476Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:40.646733Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:40.701225Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:40.722001Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:37:2084] 1768046775346962 != 1768046775346967 2026-01-10T12:06:40.774097Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:40.810136Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:40.810290Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:40.824389Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:40.824562Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:40.859519Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:40.859658Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:40.946769Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:77:2075] 1768046775365940 != 1768046775365944 2026-01-10T12:06:40.961721Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:157:2075] 1768046775385663 != 1768046775385666 2026-01-10T12:06:40.963252Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [15:117:2075] 1768046775376670 != 1768046775376674 2026-01-10T12:06:41.111710Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-10T12:06:41.129397Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-10T12:06:41.129743Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-10T12:06:41.130093Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:41.130693Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:41.130875Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:41.131046Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TTxDataShardRecomputeKMeansScan::MainTable [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> KqpRboYql::ExpressionSubquery [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> KqpRboYql::Aggregation+ColumnStore [GOOD] >> KqpRboYql::Aggregation-ColumnStore >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> 
TSchemeShardServerLess::Fake [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:50.529383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:50.529469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:50.529508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:50.529555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:50.529623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:50.529671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:50.529755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:50.529842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:50.530954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:50.531303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:50.631318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:50.631395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:50.647510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:50.653286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:50.653584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:50.661826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:50.662064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:50.662877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:50.663355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:50.667091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:50.667331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:50.668705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:50.668768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:50.668909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:50.668986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:50.669030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:50.669341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.677030Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:06:50.872670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:50.872964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.873280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:06:50.873333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:06:50.873588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:06:50.873667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:50.882579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:50.882834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:06:50.883051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.883108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:06:50.883167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:06:50.883201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:06:50.886700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.886790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:06:50.886850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:06:50.892124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.892204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.892265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:06:50.893471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:06:50.897893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:06:50.905292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:06:50.905521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:06:50.906828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:50.906997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:50.907056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:06:50.907356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:06:50.907414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:06:50.907604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:06:50.907717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:06:50.910388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:50.910442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-10T12:06:52.011540Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 2026-01-10T12:06:52.015198Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 2026-01-10T12:06:52.015454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-10T12:06:52.015801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 Forgetting tablet 72075186234409550 2026-01-10T12:06:52.017461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:06:52.017660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-10T12:06:52.018453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:06:52.018529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:06:52.018644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-10T12:06:52.019039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:06:52.019255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:06:52.019307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:06:52.019405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:06:52.023035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:06:52.023097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409549 2026-01-10T12:06:52.023224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-10T12:06:52.023249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2026-01-10T12:06:52.023585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:06:52.023630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2026-01-10T12:06:52.026222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:06:52.026325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:06:52.026679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:06:52.026722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:06:52.027349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:06:52.027464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:06:52.027505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:943:2803] TestWaitNotification: OK eventTxId 106 2026-01-10T12:06:52.028175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:06:52.028406Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 297us result status StatusPathDoesNotExist 2026-01-10T12:06:52.028586Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-10T12:06:52.029138Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:06:52.029346Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 163us result status StatusPathDoesNotExist 2026-01-10T12:06:52.029505Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:06:52.030066Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:06:52.030255Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 181us result status StatusSuccess 2026-01-10T12:06:52.030732Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2026-01-10T12:06:52.031408Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2026-01-10T12:06:52.031541Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2026-01-10T12:06:52.031595Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2026-01-10T12:06:52.031636Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2026-01-10T12:06:17.671082Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.671125Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.671149Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.671621Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:17.691509Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.691824Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.692219Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.692752Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.692892Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.692974Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.693009Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:06:17.693877Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.693906Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.693934Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.694309Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.696414Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.696564Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.696827Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.697223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.697360Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.698523Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.698579Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:06:17.699808Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.699837Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.700027Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.704436Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.705363Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.705530Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.705964Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.706670Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.706828Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.706971Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-10T12:06:17.707004Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:06:17.707952Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.707984Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.708001Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.708365Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.712329Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.712516Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.713276Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.715103Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.715860Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.716090Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.716154Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:06:17.724420Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.724451Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.724474Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.724831Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.726688Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.726903Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.727213Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.727703Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.727885Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.728016Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.728062Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:06:17.729212Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.729244Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.729273Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.733797Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:06:17.735995Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.736247Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.740323Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.740906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.744210Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.748204Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.748294Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:06:17.761173Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.761205Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.761233Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.774711Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.780370Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.780589Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.782631Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.783499Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.785001Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.786290Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:06:17.786353Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:06:17.787696Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.787720Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.787747Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:17.788150Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:06:17.788813Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:06:17.788956Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.792286Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:06:17.793927Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:17.794352Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:06:17.794446Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-10T12:06:17.794492Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:06:17.814646Z :ReadSession INFO: Random seed for debugging is 1768046777814612 2026-01-10T12:06:18.507218Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703091348967213:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:18.507295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:18.568226Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005543s 2026-01-10T12:06:18.660265Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:18.692349Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.01566 ... mits: 0, PendingWrites: 0 2026-01-10T12:06:50.116316Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.216984Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.217012Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.217023Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.217038Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.217048Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.317435Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.317465Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.317476Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.317493Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.317504Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.417864Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.417891Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.417901Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.417917Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.417927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try 
persist 2026-01-10T12:06:50.518282Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.518307Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.518316Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.518342Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.518351Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.618737Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.618764Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.618777Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.618803Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.618813Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.719213Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.719242Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.719252Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.719270Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.719282Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.819718Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:50.819750Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.819761Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.819777Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.819787Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.833029Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_16812177913811581593_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-10T12:06:50.924213Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and 
tx events 2026-01-10T12:06:50.924252Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.924266Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:50.924284Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:50.924296Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:50.932015Z :INFO: [/Root] [/Root] [e90ad36c-6d07b66a-4932be94-70da542f] Closing read session. Close timeout: 0.000000s 2026-01-10T12:06:50.932129Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-10T12:06:50.932184Z :INFO: [/Root] [/Root] [e90ad36c-6d07b66a-4932be94-70da542f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16739 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:06:50.932298Z :NOTICE: [/Root] [/Root] [e90ad36c-6d07b66a-4932be94-70da542f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:06:50.932358Z :DEBUG: [/Root] [/Root] [e90ad36c-6d07b66a-4932be94-70da542f] [dc1] Abort session to cluster 2026-01-10T12:06:50.932961Z :NOTICE: [/Root] [/Root] [e90ad36c-6d07b66a-4932be94-70da542f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:06:50.948747Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_16812177913811581593_v1 2026-01-10T12:06:50.940123Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_16812177913811581593_v1 grpc read done: success# 0, data# { } 2026-01-10T12:06:50.940162Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_16812177913811581593_v1 grpc read failed 2026-01-10T12:06:50.940195Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_16812177913811581593_v1 grpc closed 2026-01-10T12:06:50.940264Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_16812177913811581593_v1 is DEAD 2026-01-10T12:06:50.941346Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593703160068446288:2491] disconnected. 2026-01-10T12:06:50.941373Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593703160068446288:2491] disconnected; active server actors: 1 2026-01-10T12:06:50.941398Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593703160068446288:2491] client user disconnected session shared/user_1_1_16812177913811581593_v1 2026-01-10T12:06:50.949665Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7593703160068446291:2495] destroyed 2026-01-10T12:06:50.949736Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_16812177913811581593_v1 2026-01-10T12:06:51.024183Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:51.024217Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.024229Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:51.024248Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.024260Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:51.131681Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:51.131723Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.131738Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:51.131762Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.131777Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:06:51.231807Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:06:51.231846Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.231860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:06:51.231880Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:06:51.231892Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:50.224846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:50.224949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:50.224987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:50.225040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:50.225086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:50.225130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:50.225225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:50.225316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:50.226233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:50.226568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:50.324573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:50.324652Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:50.349896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:50.350043Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:50.350288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:50.374247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:50.374566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:50.375379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:50.375660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:50.378404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:50.378668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:50.379998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:50.380064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:50.380179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:50.380232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:50.380277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:50.380591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:50.549200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.550966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:50.551446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2026-01-10T12:06:53.110373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2026-01-10T12:06:53.110612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:06:53.122072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2026-01-10T12:06:53.122270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-10T12:06:53.122379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:06:53.122531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-10T12:06:53.122758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725658 2026-01-10T12:06:53.122878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:53.123027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:53.123092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-10T12:06:53.123436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:06:53.123513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-10T12:06:53.123656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:06:53.123778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1071:3001], 
EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:06:53.133317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:53.133386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:06:53.133615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:53.133659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:06:53.134067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:06:53.134121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-10T12:06:53.134177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-10T12:06:53.134840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:06:53.134937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:06:53.134975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:06:53.135027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:06:53.135092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-10T12:06:53.135191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:06:53.144420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:06:53.144517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:06:53.144678Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:06:53.144737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:53.144805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:06:53.144853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:53.144895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:06:53.144962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:06:53.145015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:06:53.145056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:06:53.145301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:06:53.146241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:06:53.153358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:06:53.153424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:06:53.154052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:06:53.154181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:06:53.154230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1276:3157] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-10T12:06:53.162232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:06:53.162528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2026-01-10T12:06:53.162609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2026-01-10T12:06:53.162791Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-10T12:06:53.162846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-10T12:06:53.169913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:06:53.170209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |84.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::ExpressionSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 21288, MsgBus: 12994 2026-01-10T12:06:04.283703Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703032538563194:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:04.283777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:04.586258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:04.586385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:04.618860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:04.641033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:04.645199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:04.651524Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703032538563158:2081] 1768046764282258 != 1768046764282261 2026-01-10T12:06:04.906436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:04.916536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:04.916559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:04.916581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:04.916694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:05.294722Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:05.536028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:05.552497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:06:08.230170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703049718433241:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:08.230277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:08.231393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703049718433251:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:08.231457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:08.610552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:06:09.283896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703032538563194:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:09.284034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:09.467099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:09.467428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:09.467727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:09.467851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:09.467973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:09.469501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:09.469510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:09.469548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:09.469708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:09.469714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:09.469817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:09.469909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:09.470004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:09.470043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:09.470105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:09.470149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:06:09.470208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:09.470272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:06:09.470334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:09.470378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593703054013401115:2339];tablet_id=72075186224037936;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:06:09.470428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:09.470539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:09.470632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:09.470737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:06:09.470872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593703054013401147:2346];tablet_id=72075186224037895;process=TTxInitSch ... : tablet_id=72075186224037955;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.764588Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037957;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.764655Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037957;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.764681Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037957;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.766001Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.766049Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.766069Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.783700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.783787Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.783812Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.789043Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.789115Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.789135Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.794789Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.794874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.794896Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.797801Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037959;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.797861Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037959;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.797882Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037959;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.805369Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.805440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.805463Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.806180Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.806225Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.806246Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.816822Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.816882Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.816904Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.818403Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.818449Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.818469Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.834202Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.834265Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.834288Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.837693Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037954;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.837768Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037954;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:39.837792Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037954;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-10T12:06:40.116336Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703186151955883:3017], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:40.116478Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:40.117343Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703186151955888:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:40.117407Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703186151955889:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:40.117448Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:40.122164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:40.151465Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703186151955892:3022], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:06:40.232454Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703186151955945:5239] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TSchemeShardServerLess::StorageBilling >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:45.284103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:45.569000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:45.579565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:45.580113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:45.580230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:45.994045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:45.994244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:46.001598Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046802106871 != 1768046802106874 2026-01-10T12:06:46.014978Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:46.069736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:46.164358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:46.548547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:46.565605Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:46.696516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:46.766064Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:06:46.767303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:06:46.767664Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:06:46.767954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:46.900917Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:06:46.902336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:46.902553Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:46.907057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:46.907219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:46.907293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:46.907773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:46.910011Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:46.910158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:06:46.924648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:47.017450Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:47.017774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:47.017946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:06:47.017997Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:47.018039Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:06:47.018078Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:47.018356Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:47.018436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:47.018915Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:47.019084Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:47.019202Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:06:47.019263Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:06:47.019323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:06:47.019366Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:06:47.019423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:06:47.019464Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:06:47.019518Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:06:47.019615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:47.019654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:47.019706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:06:47.019850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:06:47.019906Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:06:47.020041Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:06:47.028600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:06:47.028723Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:06:47.028870Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:06:47.028931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:06:47.028978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:06:47.029020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:06:47.029063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:06:47.029493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:06:47.029557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:06:47.029596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:06:47.029633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:06:47.029721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:06:47.029757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:06:47.029816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:06:47.029857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:06:47.029884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:06:47.032224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:06:47.032319Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:06:47.044799Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... r table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:53.497509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:53.497574Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:53.945250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:53.945383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:53.975283Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768046810099716 != 1768046810099720 2026-01-10T12:06:53.979931Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:54.036641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:54.128413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:54.511393Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.005383s 2026-01-10T12:06:54.516324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:54.532946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:54.652015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:54.690334Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-10T12:06:54.690578Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:54.779542Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:54.779711Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:54.786841Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:54.786971Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:54.787097Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:54.787543Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:54.787718Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:54.787859Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-10T12:06:54.806187Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:54.806293Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:54.806428Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:54.806567Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-10T12:06:54.806621Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:54.806668Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:06:54.806711Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:54.812750Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:54.812900Z node 2 
:TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:54.812966Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:06:54.813016Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:06:54.813067Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:06:54.813113Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:06:54.813224Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:884:2764], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-10T12:06:54.813400Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:06:54.813696Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:06:54.813830Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:06:54.818967Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:06:54.831175Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:06:54.831299Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:06:54.974939Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-10T12:06:54.979814Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:06:54.979914Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:54.980984Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:06:54.981067Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:06:54.981132Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:06:54.981466Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:06:54.988455Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:06:54.989209Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2026-01-10T12:06:54.989312Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:06:54.989932Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:06:54.990483Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:06:55.002810Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:06:55.002902Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:55.003819Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:06:55.003940Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:06:55.022146Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:06:55.022224Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:55.022297Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:06:55.022383Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:06:55.022445Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:06:55.022569Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:55.023630Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:06:55.038946Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:06:55.039060Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:06:55.040056Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:06:55.065531Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-10T12:06:55.065695Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> 
TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:31.279534Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:31.279784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:31.473525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:31.476276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:31.483099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:31.483585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:31.483694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:31.486174Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:31.486540Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:31.486586Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:32.235425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:32.542056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:32.542180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:32.542970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:32.543058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:32.602072Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:32.603062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:32.603470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:32.762177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:32.787793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:34.219302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:34.219368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:34.219417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:34.219843Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:44.715057Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:44.715410Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:44.729367Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:44.731023Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:44.733478Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:44.733968Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:44.734262Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:44.736775Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:44.736995Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:44.737151Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:45.147234Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:45.282270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:45.282418Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:45.283185Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:45.283280Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:45.357314Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:45.358299Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:45.358738Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:45.463690Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:45.510437Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:46.837089Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:46.837173Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:46.837216Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:46.837921Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:05:56.717819Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:56.718995Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:56.723728Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:05:56.739144Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:56.741933Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:56.742525Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:56.742599Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:56.745325Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:56.745443Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:57.201983Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:57.346834Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:57.347037Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:57.347758Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:57.347852Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting ... be) 2026-01-10T12:06:12.001824Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:12.001875Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:12.002109Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-7" reason: "YELLOW-7932-1231c6b1-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 7 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2026-01-10T12:06:24.016559Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:24.017041Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:24.034828Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:24.038630Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:24.041463Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:24.041955Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:24.042248Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:24.044179Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:24.044789Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:24.044856Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:24.775689Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:24.978099Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:24.978290Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:24.979378Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:24.979495Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:25.032356Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-10T12:06:25.033287Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:25.033812Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:25.146852Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:25.182867Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:26.987840Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:26.987932Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:26.987983Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:26.988638Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-10T12:06:37.279463Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:37.288593Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:37.328427Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:267:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:37.329139Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:37.329359Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:38.017407Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:38.167598Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:38.167775Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:38.226114Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:38.308020Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:40.943322Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:40.943402Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:40.943454Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:40.944196Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-10T12:06:51.906181Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:51.915653Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:51.940531Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:51.941077Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:51.941356Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:52.838295Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:53.118889Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:53.119086Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:53.160839Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:53.275613Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:55.111878Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:55.111965Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:55.112019Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:55.112559Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> KqpPg::TypeCoercionBulkUpsert >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::EmptyQuery+useSink |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TTransferTests::Create >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit >> KqpPg::CreateTableBulkUpsertAndRead |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |84.6%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |84.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> 
TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:58.778107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:58.778217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:58.778254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:58.778296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:58.778333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:58.778380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:58.778510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:58.778587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:58.779407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:58.779709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:58.996949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:58.997025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:59.050322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:59.050434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:59.050677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:59.088474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:59.088777Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:59.089501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:59.093528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:59.096230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:59.096428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:59.097646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:59.097707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:59.097770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:59.097825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:59.097878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:59.098154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:59.445504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.453930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454902Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.454982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.455076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:59.455154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
46678944, message: Origin: 72075186233409546 TxId: 106 2026-01-10T12:07:01.647088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:795: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2026-01-10T12:07:01.647145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 138 -> 240 2026-01-10T12:07:01.649544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409549, cookie: 0 2026-01-10T12:07:01.650032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:07:01.650146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:07:01.652694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:07:01.652898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:07:01.652950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-10T12:07:01.653101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:07:01.653139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:07:01.653174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:07:01.653209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:07:01.653244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-10T12:07:01.653283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:07:01.653318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-10T12:07:01.653365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-10T12:07:01.653446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:07:01.664865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:07:01.664935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:07:01.665521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at 
schemeshard: 72057594046678944 2026-01-10T12:07:01.665638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:07:01.665676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1363:3244] TestWaitNotification: OK eventTxId 106 2026-01-10T12:07:01.666387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:01.666637Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 265us result status StatusSuccess 2026-01-10T12:07:01.667124Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:01.667844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2026-01-10T12:07:01.668144Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 224us result status StatusSuccess 2026-01-10T12:07:01.668604Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409549 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976725657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2026-01-10T12:07:01.669438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:01.669624Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 188us result status StatusSuccess 2026-01-10T12:07:01.669991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 
72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:01.670566Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:39 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> TTransferTests::Create_Disabled >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] >> KqpRboPg::TPCH [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> KqpRboPg::Aggregation+ColumnStore [GOOD] >> KqpRboPg::Aggregation-ColumnStore >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> KqpRboYql::BasicJoins [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> 
TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::TPCH [GOOD] Test command err: Trying to start YDB, gRPC: 22629, MsgBus: 62631 2026-01-10T12:05:55.678242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702991379641036:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:55.689264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:55.972008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:55.981743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:55.981857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:56.078174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:56.107474Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:56.303798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:56.340885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:56.340914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:56.340921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:56.341020Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:56.716412Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:56.872345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:56.882018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:59.369918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703008559511065:2324], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.370060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.371682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703008559511077:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.371728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703008559511078:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.371775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:59.376049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:59.387679Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703008559511081:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:05:59.535416Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703008559511132:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 63660, MsgBus: 62304 2026-01-10T12:06:00.681036Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703016180424101:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:00.681476Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:00.713696Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:00.805150Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:00.807503Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703016180423996:2081] 1768046760674266 != 1768046760674269 2026-01-10T12:06:00.825142Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:00.825224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:00.857449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:00.961563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:01.004734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:01.004762Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:01.004769Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:01.004843Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:01.486023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:01.696855Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:04.275302Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703033360294063:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.275377Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.275626Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703033360294072:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.275670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:04.569619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:06:05.286635Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[2:7593703033360294668:2334];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:05.286638Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:05.286910Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:05.287191Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:05.287303Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:05.287445Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:05.287452Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[2:7593703033360294668:2334];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:05.287581Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703033360294607:2330];tablet_id=72075186224037891;process= ... 
s_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.834086Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.843360Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.843436Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.843456Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.847496Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.847736Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.847765Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.853307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.853382Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.853402Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.858805Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.858886Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.858908Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.863853Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.863931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.863954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.870202Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.870280Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.870300Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.873981Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.874047Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.874069Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:06:53.896296Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwtb55etmra5hep91nrre5", SessionId: ydb://session/3?node_id=4&id=MTkyYWJiYjMtNDJkN2ZiNTAtYTkyYWU1ZDEtMjFhODkzNGE=, Slow query, duration: 13.852986s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE `/Root/customer` (\n c_acctbal Double,\n c_address String,\n c_comment String,\n c_custkey Int32 NOT NULL, -- Identifier\n c_mktsegment String ,\n c_name String ,\n c_nationkey Int32 , -- FK to N_NATIONKEY\n c_phone String ,\n PRIMARY KEY (c_custkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/lineitem` (\n l_comment String ,\n l_commitdate Date ,\n l_discount Double , -- it should be Decimal(12, 2)\n l_extendedprice Double , -- it should be Decimal(12, 2)\n l_linenumber Int32 NOT NULL,\n l_linestatus String ,\n l_orderkey Int32 NOT NULL, -- FK to O_ORDERKEY\n l_partkey Int32 , -- FK to P_PARTKEY, first part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with L_SUPPKEY\n l_quantity Double , -- it should be Decimal(12, 2)\n l_receiptdate Date ,\n l_returnflag String ,\n l_shipdate Date ,\n l_shipinstruct String ,\n l_shipmode String ,\n l_suppkey Int32 , -- FK to S_SUPPKEY, second part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with 
L_PARTKEY\n l_tax Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (l_orderkey, l_linenumber)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/nation` (\n n_comment String ,\n n_name String ,\n n_nationkey Int32 NOT NULL, -- Identifier\n n_regionkey Int32 , -- FK to R_REGIONKEY\n PRIMARY KEY(n_nationkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/orders` (\n o_clerk String ,\n o_comment String ,\n o_custkey Int32 , -- FK to C_CUSTKEY\n o_orderdate Date ,\n o_orderkey Int32 NOT NULL, -- Identifier\n o_orderpriority String ,\n o_orderstatus String ,\n o_shippriority Int32 ,\n o_totalprice Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (o_orderkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/part` (\n p_brand String ,\n p_comment String ,\n p_container String ,\n p_mfgr String ,\n p_name String ,\n p_partkey Int32 NOT NULL, -- Identifier\n p_retailprice Double , -- it should be Decimal(12, 2)\n p_size Int32 ,\n p_type String ,\n PRIMARY KEY(p_partkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/partsupp` (\n ps_availqty Int32 ,\n ps_comment String ,\n ps_partkey Int32 NOT NULL, -- FK to P_PARTKEY\n ps_suppkey Int32 NOT NULL, -- FK to S_SUPPKEY\n ps_supplycost Double , -- it should be Decimal(12, 2)\n PRIMARY KEY(ps_partkey, ps_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/region` (\n r_comment String ,\n r_name String ,\n r_regionkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(r_regionkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/supplier` (\n s_acctbal Double , -- it should be Decimal(12, 2)\n s_address String ,\n s_comment String ,\n s_name String ,\n s_nationkey Int32 , -- FK to N_NATIONKEY\n s_phone String ,\n s_suppkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(s_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n", parameters: 0b 2026-01-10T12:06:53.938126Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703243887087223:3037], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:53.938272Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:53.938669Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703243887087228:3040], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:53.938728Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:53.938791Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703243887087229:3041], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:53.945042Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:53.978077Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703243887087232:3042], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2026-01-10T12:06:54.068017Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703248182054580:5400] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> KqpRboYql::OrderBy [GOOD] >> KqpRboYql::LeftJoins >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TNebiusAccessServiceTest::Authenticate |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:42.320532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:42.659345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:42.682878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:42.683333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:42.683428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:43.785922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:43.786124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:43.812483Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046795821540 != 1768046795821543 2026-01-10T12:06:43.833666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:43.889839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:44.018758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:44.598613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:06:44.599120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:06:44.625630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:44.626747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:06:44.645546Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:06:44.645636Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-10T12:06:44.661712Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:44.694944Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-10T12:06:44.695085Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-10T12:06:44.778767Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-10T12:06:44.778890Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-10T12:06:44.779191Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-10T12:06:44.779638Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-10T12:06:44.779700Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-10T12:06:44.780197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:06:44.781682Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-10T12:06:44.781781Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-10T12:06:44.781838Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-10T12:06:44.781896Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-10T12:06:44.783119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:06:44.795134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-10T12:06:44.796326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-10T12:06:44.813201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:06:44.815937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:06:44.816038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:44.817132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-10T12:06:44.845979Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-10T12:06:44.882411Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:06:44.882568Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-10T12:06:44.882872Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-10T12:06:44.882939Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-10T12:06:44.883007Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-10T12:06:44.883167Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-10T12:06:44.899243Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-10T12:06:44.899461Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-10T12:06:44.900135Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-10T12:06:44.900558Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-10T12:06:44.900666Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136357058827040}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-10T12:06:44.900748Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136357058827040}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-10T12:06:44.900907Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136357058827040}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-10T12:06:44.901015Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-10T12:06:44.901074Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-10T12:06:44.901119Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-10T12:06:44.901299Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-10T12:06:44.901344Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-10T12:06:44.901395Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-10T12:06:44.901575Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-10T12:06:44.901812Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-10T12:06:44.902333Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-10T12:06:44.902552Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... 94037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-10T12:07:05.411716Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-10T12:07:05.411868Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:07:05.411963Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-10T12:07:05.412011Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:07:05.412098Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-10T12:07:05.412585Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-10T12:07:05.423572Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:07:05.435046Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-10T12:07:05.456380Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2026-01-10T12:07:05.456560Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 34500 2026-01-10T12:07:05.456671Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2026-01-10T12:07:05.456896Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:05.456958Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715666 ssId 72057594046644480 seqNo 2:4 2026-01-10T12:07:05.457005Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715666 at tablet 72075186224037889 2026-01-10T12:07:05.457233Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:07:05.457424Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 
planned 1 2026-01-10T12:07:05.457486Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:05.457518Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:07:05.457557Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-10T12:07:05.468380Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:05.468517Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:05.470053Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:07:05.470129Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715666 step# 35000 Status# 16 SEND to# [2:399:2398] Proxy marker# C1 2026-01-10T12:07:05.544550Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715666 has been planned 2026-01-10T12:07:05.544650Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2026-01-10T12:07:05.544690Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2026-01-10T12:07:05.544978Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 35500 in 0.500000s at 35.450000s 2026-01-10T12:07:05.545387Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 35000, txid# 281474976715666 marker# C2 2026-01-10T12:07:05.545471Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715666 stepId# 35000 Status# 17 SEND EvProposeTransactionStatus to# [2:399:2398] Proxy 2026-01-10T12:07:05.546117Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715666 at step 35000 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 35000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-10T12:07:05.546173Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:05.546410Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 35000, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:07:05.546798Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:05.546849Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:07:05.546901Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [35000:281474976715666] in PlanQueue unit at 72075186224037889 2026-01-10T12:07:05.547106Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 35000:281474976715666 keys extracted: 0 2026-01-10T12:07:05.547236Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:05.547488Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:05.547590Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-10T12:07:05.548008Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:05.562240Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 35000} 2026-01-10T12:07:05.562342Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:05.563078Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-10T12:07:05.563189Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-10T12:07:05.563278Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-10T12:07:05.563320Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2026-01-10T12:07:05.563369Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 acknowledged 2026-01-10T12:07:05.563651Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:05.563726Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [35000 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:07:05.563788Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2026-01-10T12:07:05.563886Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:05.573486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2026-01-10T12:07:05.586881Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2026-01-10T12:07:05.586991Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:07:05.587623Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2026-01-10T12:07:05.587737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2026-01-10T12:07:05.595175Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2026-01-10T12:07:05.595867Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:07:05.628925Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:07:05.629140Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-10T12:07:05.631098Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:07:05.632024Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-10T12:07:05.639628Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-10T12:07:05.639722Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-10T12:07:05.639858Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-10T12:07:05.640000Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-10T12:07:05.640149Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TNebiusAccessServiceTest::Authenticate [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::BasicJoins [GOOD] Test command err: Trying to start YDB, gRPC: 21375, MsgBus: 17603 2026-01-10T12:05:53.499496Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702986009218984:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:53.500046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:05:53.534391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:53.907483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:53.913303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.913464Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.915528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.121542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702986009218743:2081] 1768046753452944 != 1768046753452947 2026-01-10T12:05:54.126073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:54.139691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:54.328050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:54.328213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:54.328236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:54.328340Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:54.487515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:55.237468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:55.245161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:05:58.098277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703007484056118:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.098416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.098812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703007484056128:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.098888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:58.484805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:05:58.499692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702986009218984:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:58.499759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:59.335746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:05:59.336009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:05:59.336336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:05:59.336386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:05:59.336598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:05:59.336990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:05:59.337139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:05:59.337230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:05:59.337351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:05:59.337350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:05:59.337458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:05:59.337461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:05:59.337548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:05:59.337557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:05:59.337637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:05:59.337676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:05:59.337752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:05:59.337776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:05:59.337862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:05:59.337881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:05:59.337952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:05:59.337977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:05:59.338051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7593703007484056695:2333];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:05:59.338073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593703007484056698:2336];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_r ... 81474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.856371Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.871357Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.871445Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.871470Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.872127Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.872174Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.872214Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882249Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882249Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882308Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882330Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882339Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.882349Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.891454Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.891762Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.891798Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.895518Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.895597Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.895616Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.901066Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.901147Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.901168Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.906280Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.906353Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.906394Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-10T12:06:28.944813Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwsk0jf42cgecp1eq8dv7t", SessionId: ydb://session/3?node_id=3&id=MTA0YzYwZDItNmFhN2YzMzAtZmZkZWJlZjUtYzEzN2Q3YjI=, Slow query, duration: 13.628148s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/t1` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n\n CREATE TABLE `/Root/t2` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n\n CREATE TABLE `/Root/t3` (\n a Int64\tNOT NULL,\n\t b Int64,\n primary key(a)\n ) WITH (Store = Column);\n ", parameters: 0b 2026-01-10T12:06:29.131969Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703140485429193:3359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:29.132070Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:29.132436Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703140485429198:3362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:29.132484Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703140485429199:3363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:29.132752Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:29.146372Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:29.185604Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703140485429202:3364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:06:29.270203Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703140485429255:6615] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:01.706688Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593703277924391536:11473], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:135: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)', code: 1 2026-01-10T12:07:01.710794Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YmJkYmNiNmItMTQzMjI5NWQtOWE3NDM3YzMtZDg1NDNkMmM=, ActorId: [3:7593703277924391533:11471], ActorState: ExecuteState, LegacyTraceId: 01kekwv06caac0mm98w46jbmbr, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 135 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)\'" end_position { row: 3 column: 135 } issue_code: 1 } } tx_id# trace_id# 2026-01-10T12:07:01.919476Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593703277924391553:11482], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:143: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported, code: 1 2026-01-10T12:07:01.924618Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MmUxNTAzNzgtZTQyMDhmYzAtOWJjZTBhY2ItZjhiYzZmMDc=, ActorId: [3:7593703277924391550:11480], ActorState: ExecuteState, LegacyTraceId: 01kekwv0cg5czmzke6m53hm454, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 143 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported" end_position { row: 3 column: 143 } issue_code: 1 } } tx_id# trace_id# |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-10T12:07:08.248514Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0b8b1e2ed0] Connect to grpc://localhost:18294 2026-01-10T12:07:08.370512Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0b8b1e2ed0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2026-01-10T12:07:08.404422Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0b8b1e2ed0] Status 7 Permission Denied 2026-01-10T12:07:08.412258Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0b8b1e2ed0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2026-01-10T12:07:08.415382Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0b8b1e2ed0] Response AuthenticateResponse { account { user_account { id: "1234" } } } |84.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> KqpRboYql::Aggregation-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:48.518540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:48.697079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:48.708640Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:48.709194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:48.709320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:49.142997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:49.143189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:49.151069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046802238297 != 1768046802238300 2026-01-10T12:06:49.165763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:49.225361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:49.340847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:49.688867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:49.709635Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:49.840940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:49.899633Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:900:2774] 2026-01-10T12:06:49.899959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:49.956359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:49.956568Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:49.958815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:49.958954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:49.959020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:49.959524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:49.959849Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:49.959934Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-10T12:06:49.960476Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-10T12:06:49.960732Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:49.973002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:49.973140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:49.974749Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:06:49.974844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:06:49.974904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:06:49.975226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:49.975555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:49.975620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2776] in generation 1 2026-01-10T12:06:49.976030Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-10T12:06:49.976346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:49.986981Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-10T12:06:49.987221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:49.997857Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:49.997994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:49.999711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-10T12:06:49.999795Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-10T12:06:49.999846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-10T12:06:50.000217Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:50.000398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:50.000485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-10T12:06:50.001019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:50.001106Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:50.002707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:06:50.002792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:06:50.002859Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:06:50.003179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:50.003299Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:50.003365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-10T12:06:50.017196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:50.071407Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:50.071709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:50.071853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-10T12:06:50.071902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:50.071950Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:06:50.071992Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:50.072541Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:50.072618Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:06:50.072723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:50.072800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-10T12:06:50.072832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:06:50.072862Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:06:50.072896Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:06:50.073335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:50.073380Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-10T12:06:50.073442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:50.073503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-10T12:06:50.073531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-10T12:06:50.073555Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-10T12:06:50.073610Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-10T12:06:50.073906Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:50.074034Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:50.074168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:50.074221Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:06:50.074276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... 2:07:06.245243Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:06.245949Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:07:06.246018Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:07:06.246093Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:07:06.246452Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T12:07:06.246643Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:06.247390Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:07:06.247507Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:07:06.248037Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:07:06.248869Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:06.250752Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:07:06.250843Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:06.254288Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:07:06.254466Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:07:06.256529Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:07:06.256931Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:07:06.256999Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 
2026-01-10T12:07:06.257059Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:07:06.257149Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:07:06.257221Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:07:06.257342Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:06.260335Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:07:06.260654Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:07:06.260744Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:07:06.274938Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:06.275081Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:06.275171Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:06.277085Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:06.277329Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:06.282719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:06.292360Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:07:06.433128Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:07:06.445367Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:07:06.485015Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:06.607437Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:07:06.608034Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:07:06.608300Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-10T12:07:06.620739Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:06.833482Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-10T12:07:06.833711Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=3 2026-01-10T12:07:06.846510Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:06.984057Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-10T12:07:06.984871Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=3 2026-01-10T12:07:06.996776Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:07.000962Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2026-01-10T12:07:07.015863Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-10T12:07:07.015977Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:07.017953Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:07:07.018391Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:07:07.018680Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:07:07.018736Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:07:07.018799Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710663] at 72075186224037888 for WaitForStreamClearance 2026-01-10T12:07:07.019092Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:07:07.019181Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:07:07.020006Z node 3 
:TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710663, MessageQuota: 1 2026-01-10T12:07:07.023487Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:07:07.023719Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710663, PendingAcks: 0 2026-01-10T12:07:07.023791Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710663, MessageQuota: 0 2026-01-10T12:07:07.083537Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:07:07.083650Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710663, at: 72075186224037888 2026-01-10T12:07:07.084398Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:07:07.084459Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:07:07.084509Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710663] at 72075186224037888 for ReadTableScan 2026-01-10T12:07:07.084680Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:07.084762Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:07:07.084814Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> KqpRboYql::TPCH_YDB_PERF [GOOD] >> KqpRboYql::TPCH_YQL |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 9462, MsgBus: 15708 2026-01-10T12:04:10.026001Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702543578686509:2182];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:10.027418Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:10.074380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:04:10.416339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:10.422811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:10.422933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:10.456728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:10.532335Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:10.635298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:10.651520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:10.651549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:10.651556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:10.651642Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:11.022943Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:11.295547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:11.352383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:11.578155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:11.793655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:11.895370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.015920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702560758557418:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.016026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.016379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702560758557428:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.016438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.409680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.455477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.497719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.558436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.615134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.676499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.736557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.801715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:14.887429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702560758558305:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.887510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.887700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702560758558310:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.887719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702560758558311:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.887765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:14.890963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:14.901266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702560758558314:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:04:14.982802Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702560758558365:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:04:15.017572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702543578686509:2182];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:15.017652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:17.018674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshar ... 841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:50.877788Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:50.882787Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:50.882931Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:50.885814Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593703230123491286:2081] 1768046810659823 != 1768046810659826 2026-01-10T12:06:50.885998Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:51.029224Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:51.112317Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:51.249034Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:51.249071Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:51.249088Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:51.249227Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:51.670683Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:52.404536Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:52.434814Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:52.593024Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:52.978168Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:53.161174Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:55.662480Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593703230123491315:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:55.662620Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:58.756122Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703264483231576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:58.756278Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:58.760280Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703264483231586:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:58.760416Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:58.914747Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.029800Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.154804Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.258850Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.383084Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.501823Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.645579Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:59.809741Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:00.076772Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703273073167076:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:00.076965Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:00.077600Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703273073167081:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:00.077677Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703273073167082:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:00.077742Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:00.086258Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:00.157710Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593703273073167085:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:07:00.244060Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593703273073167136:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:05.814510Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768046825839, txId: 281474976710673] shutting down 2026-01-10T12:07:05.862819Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:07:05.862851Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:06.458188Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768046826476, txId: 281474976710675] shutting down |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink >> JsonChangeRecord::DataChange [GOOD] |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::Aggregation-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5022, MsgBus: 27882 2026-01-10T12:05:52.504535Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702977857717585:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:52.518115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:05:52.882166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:52.882264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:52.940053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:05:52.964670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:52.968602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:53.012151Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702977857717563:2081] 1768046752488360 != 1768046752488363 2026-01-10T12:05:53.162313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:53.199280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:53.199302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:53.199310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:53.199410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:53.544431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:53.808410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:57.194373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332554938:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.194495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.194976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332554948:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.195048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.504895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702977857717585:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:05:57.504969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:05:57.574463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.777671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332555043:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.779521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.783225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332555046:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.783321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.809152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:57.968069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332555139:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.968202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.968740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332555144:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.968788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702999332555145:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.969069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:05:57.973570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:05:57.999126Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702999332555148:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:05:58.152005Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703003627522495:2665] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22021, MsgBus: 27587 2026-01-10T12:06:00.144857Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703013703785052:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:00.144904Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:00.185225Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:00.395560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:00.395643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:00.418805Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:00.433332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:00.475134Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:00.604285Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:00.604309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:00.604319Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:00.604403Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:01.160515Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:01.183062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:01.189600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:04.377633Z node 2 :K ... 
ce pool default not found or you don't have access permissions } 2026-01-10T12:06:19.234962Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703096700428129:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.234996Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:19.249228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:19.279267Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703096700428132:3133], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:06:19.358118Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703096700428185:5238] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:22.150180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:06:22.150212Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 17584, MsgBus: 64772 2026-01-10T12:06:52.503459Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703239074924337:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:52.508357Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:52.508689Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:52.868268Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:52.988474Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:53.039121Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:53.039227Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:53.040460Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703239074924102:2081] 1768046812374910 != 1768046812374913 2026-01-10T12:06:53.100479Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:53.148201Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:53.396823Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:53.396852Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:53.396866Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:53.396967Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:53.424260Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:53.579107Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:54.932262Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:54.938398Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:57.426522Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593703239074924337:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:57.426607Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:01.709447Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703277729630671:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.709542Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.710034Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703277729630681:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.710085Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.754201Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:01.892263Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703277729630773:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.892432Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.896627Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703277729630776:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.896716Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:01.929807Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:02.048322Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703282024598149:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:02.048417Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:02.051905Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703282024598154:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:02.051989Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703282024598155:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:02.052194Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:02.058568Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:02.100334Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703282024598158:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:07:02.176339Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703282024598209:2659] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:07.968252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:07:07.968285Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:55.524728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:55.749791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:55.765447Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:55.765870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:55.765973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:56.429862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:56.430033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:56.445255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046808863832 != 1768046808863835 2026-01-10T12:06:56.517429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:56.585341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:56.711580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:57.208410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:57.225605Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:57.375711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:57.471102Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:900:2774] 2026-01-10T12:06:57.471398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:57.638619Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:57.638818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:57.660867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:57.661006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:57.661067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:57.661457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:57.661784Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:57.661843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-10T12:06:57.662211Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-10T12:06:57.662446Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:57.686882Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:57.687009Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:57.693107Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:06:57.693216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:06:57.693287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:06:57.693707Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:57.694054Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:57.694112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2776] in generation 1 2026-01-10T12:06:57.694498Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-10T12:06:57.694704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:57.705470Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-10T12:06:57.705687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:57.716971Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:57.717094Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:57.718813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-10T12:06:57.718885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-10T12:06:57.718978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-10T12:06:57.719272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:57.719432Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:57.719491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-10T12:06:57.719937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:57.720018Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:57.721287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:06:57.721354Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:06:57.721398Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:06:57.721665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:57.721763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:57.721827Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-10T12:06:57.732964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:57.803534Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:57.803751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:57.803864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-10T12:06:57.803914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:57.803965Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:06:57.804010Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:57.809229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:57.809323Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:06:57.809401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:57.809486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-10T12:06:57.809518Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:06:57.809544Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:06:57.809571Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:06:57.810035Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:57.810079Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-10T12:06:57.810131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:57.810184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-10T12:06:57.810208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-10T12:06:57.810228Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-10T12:06:57.810266Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-10T12:06:57.810548Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:57.810660Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:57.810823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:57.810853Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:06:57.810899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:07:11.990331Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:11.992210Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:933:2795] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] 
BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-10T12:07:11.993691Z node 3 :CHANGE_EXCHANGE 
DEBUG: change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:966:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-10T12:07:11.993936Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1282: Upload rows: got OK from shard 72075186224037889 description: 2026-01-10T12:07:11.994112Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1412: completed with status SUCCESS >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::EmptyCluster |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> ExternalBlobsMultipleChannels::SingleChannel >> KqpSystemView::PartitionStatsRanges |84.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:00.424401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:00.573795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:00.583163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:00.583634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:00.583742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:00.996874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:00.997042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:01.004747Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046816906822 != 1768046816906825 2026-01-10T12:07:01.017933Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.069077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:01.158316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:01.498176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:07:01.498598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:07:01.500476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:01.501398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:07:01.503380Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:07:01.503446Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-10T12:07:01.516903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:01.543809Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-10T12:07:01.543931Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-10T12:07:01.620165Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-10T12:07:01.620274Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-10T12:07:01.620547Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-10T12:07:01.620987Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-10T12:07:01.621052Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-10T12:07:01.621528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:07:01.622931Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-10T12:07:01.623024Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-10T12:07:01.623086Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-10T12:07:01.623125Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-10T12:07:01.636658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:07:01.636811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-10T12:07:01.637991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-10T12:07:01.645663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:07:01.647263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:07:01.647402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:01.656790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-10T12:07:01.678874Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-10T12:07:01.726211Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:07:01.726359Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-10T12:07:01.726623Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-10T12:07:01.726737Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-10T12:07:01.726818Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-10T12:07:01.727037Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-10T12:07:01.746001Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-10T12:07:01.746230Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-10T12:07:01.746908Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-10T12:07:01.747390Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-10T12:07:01.747534Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137172397970208}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-10T12:07:01.747613Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137172397970208}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-10T12:07:01.747771Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137172397970208}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-10T12:07:01.747910Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-10T12:07:01.748000Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-10T12:07:01.748063Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-10T12:07:01.759699Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-10T12:07:01.759759Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-10T12:07:01.759850Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-10T12:07:01.759997Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-10T12:07:01.760228Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-10T12:07:01.760401Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-10T12:07:01.760652Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... reason: , at schemeshard: 72057594046644480 2026-01-10T12:07:12.156657Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2026-01-10T12:07:12.160991Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:07:12.161175Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2026-01-10T12:07:12.161266Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2026-01-10T12:07:12.162686Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:07:12.162818Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:07:12.174359Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:326:2368] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:399:2398] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-10T12:07:12.174823Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:07:12.176258Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:07:12.176438Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:07:12.184644Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-10T12:07:12.184772Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-10T12:07:12.185297Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:07:12.185385Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-10T12:07:12.185445Z node 2 :HIVE WARN: hive_impl.cpp:519: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:07:12.185529Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-10T12:07:12.186062Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-10T12:07:12.199155Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:07:12.202380Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:07:12.202505Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 4500 Status# 16 SEND to# [2:399:2398] Proxy marker# C1 2026-01-10T12:07:12.217028Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-10T12:07:12.307985Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2026-01-10T12:07:12.308127Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2026-01-10T12:07:12.308228Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2026-01-10T12:07:12.308571Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 5000 in 0.500000s at 4.950000s 2026-01-10T12:07:12.309112Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 4500, txid# 281474976715665 marker# C2 2026-01-10T12:07:12.309207Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 4500 Status# 17 SEND EvProposeTransactionStatus to# [2:399:2398] Proxy 2026-01-10T12:07:12.309968Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 4500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 4500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-10T12:07:12.310036Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:12.310242Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 4500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:07:12.310771Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:12.310833Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:07:12.310916Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4500:281474976715665] in PlanQueue unit at 72075186224037889 2026-01-10T12:07:12.311159Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 4500:281474976715665 keys extracted: 0 2026-01-10T12:07:12.311313Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:12.311594Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:07:12.311693Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-10T12:07:12.316223Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:12.319368Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 4500} 2026-01-10T12:07:12.319480Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:12.319913Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-10T12:07:12.320065Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-10T12:07:12.320147Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-10T12:07:12.320189Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2026-01-10T12:07:12.320246Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 acknowledged 2026-01-10T12:07:12.320724Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:12.320821Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:07:12.320904Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2026-01-10T12:07:12.321033Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:12.325959Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2026-01-10T12:07:12.335165Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-10T12:07:12.335328Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2026-01-10T12:07:12.335837Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2026-01-10T12:07:12.335941Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:07:12.339276Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2026-01-10T12:07:12.340616Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:07:12.356890Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:07:12.357205Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-10T12:07:12.359573Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:07:12.365566Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-10T12:07:12.366268Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-10T12:07:12.366377Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-10T12:07:12.366633Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-10T12:07:12.366982Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-10T12:07:12.367177Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |84.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] |84.7%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall |84.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |84.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.8%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> TTransferTests::Alter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:54.998905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:55.120812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:55.132654Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:55.133202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:55.133308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:55.535996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:55.536193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:55.543516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046751493073 != 1768046751493076 2026-01-10T12:05:55.559811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:55.613237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:55.724424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:56.054773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:56.069549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:56.174065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:56.207336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:56.208163Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:56.208407Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:05:56.208637Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:56.252370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:56.253220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:56.253351Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:56.255472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:56.255591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:56.255675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:56.259100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:56.259304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:56.259439Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:05:56.270298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:56.315596Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:05:56.315826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:56.315944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:05:56.315980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:56.316030Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:05:56.316102Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:56.316371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:56.316427Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:56.316774Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:05:56.316868Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:05:56.316965Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:56.317005Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:56.317065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:56.317102Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:56.317149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:56.317198Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:05:56.317254Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:05:56.317360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:56.317410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:56.317458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:05:56.317558Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:05:56.317599Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:05:56.317701Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:05:56.317954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:05:56.318027Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:05:56.318155Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:05:56.318209Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:05:56.318251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:05:56.318290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:05:56.318326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:05:56.318649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:05:56.318692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:05:56.318729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:05:56.318766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:56.318829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:05:56.318878Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:05:56.318912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:05:56.318963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:05:56.318993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:05:56.320769Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:05:56.320820Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:05:56.332808Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:12.463023Z node 10 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:07:12.463169Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:921:2784], Recipient [10:1152:2958]: {TEvReadSet step# 3001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-10T12:07:12.463206Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:07:12.463246Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2026-01-10T12:07:12.744303Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1204:2997], Recipient [10:1152:2958]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:12.744421Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:12.744554Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [10:1203:2996], serverId# [10:1204:2997], sessionId# [0:0:0] 2026-01-10T12:07:12.761495Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1209:3000], Recipient [10:1152:2958]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:07:12.761736Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:07:12.761873Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-10T12:07:12.762030Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:07:12.762108Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:07:12.762178Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:12.762243Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:07:12.762325Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-10T12:07:12.762398Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:07:12.762456Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:12.762489Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:07:12.762520Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:07:12.762703Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:07:12.763079Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-10T12:07:12.763178Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[10:1209:3000], 0} after executionsCount# 1 2026-01-10T12:07:12.763254Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[10:1209:3000], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:12.763385Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[10:1209:3000], 0} finished in read 2026-01-10T12:07:12.763492Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:07:12.763527Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:07:12.763568Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:12.763603Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:12.763658Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:07:12.763689Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:12.763724Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-10T12:07:12.763792Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:07:12.763960Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:07:12.767508Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269553215, Sender [10:1209:3000], Recipient [10:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-10T12:07:12.767727Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1209:3000], Recipient [10:1152:2958]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:07:12.767807Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:07:12.767909Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:07:12.767981Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-10T12:07:12.768062Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:12.768122Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:07:12.768156Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:12.768190Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:07:12.768245Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-10T12:07:12.768289Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:12.768338Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:12.768394Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:07:12.768427Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:07:12.768545Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-10T12:07:12.768789Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-10T12:07:12.768839Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[10:1209:3000], 1} after executionsCount# 1 2026-01-10T12:07:12.768882Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[10:1209:3000], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:12.768972Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read 
iterator# {[10:1209:3000], 1} finished in read 2026-01-10T12:07:12.769030Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:12.769061Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:07:12.769090Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:07:12.769124Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:07:12.769173Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:12.769217Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:07:12.769249Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-10T12:07:12.769284Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:07:12.769380Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:07:12.771024Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1209:3000], Recipient [10:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:07:12.771100Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> StreamCreator::TopicAutoPartitioning |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpRboPg::Aggregation-ColumnStore [GOOD] >> KqpRboPg::AliasesRenames ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:48.882840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:49.309184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:49.318536Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:49.319000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:49.319125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:50.017763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:50.017945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:50.049319Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046803083314 != 1768046803083317 2026-01-10T12:06:50.096595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:50.189299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:50.332931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:50.935995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:50.973438Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:51.145734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:51.354792Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:06:51.356027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:06:51.376739Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-10T12:06:51.377102Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:51.555197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:06:51.555680Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:878:2760], Recipient [1:894:2770]: 
NKikimr::TEvTablet::TEvBoot 2026-01-10T12:06:51.556988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:51.557279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:51.559077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:51.559170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:51.559233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:51.559649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:51.559757Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:06:51.560026Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-10T12:06:51.576933Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:51.600335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:51.600441Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:919:2768] in generation 1 2026-01-10T12:06:51.600839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:06:51.602228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:51.612554Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:51.614596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:06:51.614684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:06:51.614737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:06:51.615084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:51.615326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:51.615431Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-10T12:06:51.629166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:51.807001Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:51.807226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:51.807361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-10T12:06:51.807400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:51.807445Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-10T12:06:51.807488Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:51.807854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:51.807937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:51.808058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:51.812611Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:06:51.812770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:51.812885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-10T12:06:51.812938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:06:51.812967Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:06:51.812995Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:06:51.813551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:51.813637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:51.813922Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:51.814085Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:51.814239Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:06:51.814296Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:06:51.814346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:06:51.814415Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:06:51.814468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:06:51.814520Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:06:51.814573Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:06:51.814642Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:06:51.814737Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-10T12:06:51.814892Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:895:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:51.814937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:51.814987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:895:2771], sessionId# [0:0:0] 2026-01-10T12:06:51.815038Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:06:51.815112Z node 1 :TX_DAT ... on plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:13.791768Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 72075186224037888 has finished 2026-01-10T12:07:13.804169Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-10T12:07:13.805310Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 72075186224037888 on unit DirectOp 2026-01-10T12:07:13.805363Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2026-01-10T12:07:14.061972Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:07:14.062055Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976710662 ProcessProposeKqpTransaction 2026-01-10T12:07:14.065700Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1401:3185], Recipient [3:889:2769]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:07:14.065919Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:07:14.065984Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v23000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v23000/18446744073709551615 ImmediateWriteEdgeReplied# v23000/18446744073709551615 2026-01-10T12:07:14.066029Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-10T12:07:14.066093Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2026-01-10T12:07:14.066196Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-10T12:07:14.066253Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:07:14.066298Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:14.066333Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 
2026-01-10T12:07:14.066401Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2026-01-10T12:07:14.066460Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-10T12:07:14.066482Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:14.066504Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:07:14.066526Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:07:14.066644Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:07:14.066894Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[3:1401:3185], 0} after executionsCount# 1 2026-01-10T12:07:14.066956Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[3:1401:3185], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:14.067046Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[3:1401:3185], 0} finished in read 2026-01-10T12:07:14.067105Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-10T12:07:14.067130Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:07:14.067165Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:14.067200Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:14.067245Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-10T12:07:14.067267Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:14.067297Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 72075186224037888 has finished 2026-01-10T12:07:14.067336Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:07:14.067434Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:07:14.069220Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1401:3185], Recipient [3:889:2769]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:07:14.069287Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } 
items { uint32_value: 4 } } 2026-01-10T12:07:14.259702Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:07:14.259778Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976710663 ProcessProposeKqpTransaction 2026-01-10T12:07:14.263915Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1429:3209], Recipient [3:1113:2930]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2026-01-10T12:07:14.264237Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:07:14.264311Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8000/281474976715759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v7000/18446744073709551615 ImmediateWriteEdgeReplied# v7000/18446744073709551615 2026-01-10T12:07:14.264358Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-10T12:07:14.264430Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-10T12:07:14.264525Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:14.264568Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:07:14.264609Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:14.264651Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:07:14.264713Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-10T12:07:14.264774Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:14.264820Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:14.264863Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:07:14.264887Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:07:14.265002Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:07:14.265369Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[3:1429:3209], 0} after executionsCount# 1 2026-01-10T12:07:14.265440Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[3:1429:3209], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 
5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:14.265545Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[3:1429:3209], 0} finished in read 2026-01-10T12:07:14.265611Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:14.265640Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:07:14.265679Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:07:14.265718Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:07:14.265764Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:14.265786Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:07:14.265819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-10T12:07:14.265864Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:07:14.265976Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:07:14.267620Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1429:3209], Recipient [3:1113:2930]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:07:14.267689Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow [GOOD] >> DataShardWrite::LocksBrokenStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:06:51.012589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:51.136386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:06:51.146114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:06:51.147544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:51.147731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:06:51.565217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:51.565385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:51.572103Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046807362149 != 1768046807362152 2026-01-10T12:06:51.585883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:51.642610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:51.783172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:52.169278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:52.189728Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:52.302176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:52.361449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:06:52.362682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:06:52.363041Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:06:52.363337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:06:52.421938Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:06:52.422889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:06:52.423024Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:06:52.425262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:06:52.425355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:06:52.425454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:06:52.425910Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:06:52.426076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:06:52.426176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:06:52.437945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:06:52.488908Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:06:52.489222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:06:52.489403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:06:52.489450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:06:52.489515Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:06:52.489554Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:06:52.489802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:52.489850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:06:52.490241Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:06:52.490363Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:06:52.490463Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:06:52.490530Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:06:52.490586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:06:52.490639Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:06:52.490697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:06:52.490735Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:06:52.490798Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:06:52.490894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:52.490932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:06:52.490980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:06:52.491100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:06:52.491146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:06:52.491270Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:06:52.491569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:06:52.491635Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:06:52.491758Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:06:52.491816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:06:52.491856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:06:52.491915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:06:52.491957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:06:52.496977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:06:52.497079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:06:52.497332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:06:52.497379Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:06:52.497475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:06:52.497515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:06:52.497577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:06:52.497621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:06:52.497649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:06:52.499997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:06:52.500525Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:06:52.511457Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... pp:1883: Execution status for [4500:281474976710668] at 72075186224037889 is DelayComplete 2026-01-10T12:07:14.463603Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037889 executing on unit CompleteOperation 2026-01-10T12:07:14.463652Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976710668] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:07:14.463699Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:07:14.463740Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037889 is Executed 2026-01-10T12:07:14.463769Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:07:14.463800Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976710668] at 72075186224037889 has finished 2026-01-10T12:07:14.463850Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:14.463885Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2026-01-10T12:07:14.463922Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-10T12:07:14.463959Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2026-01-10T12:07:14.485099Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:14.485189Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:07:14.485234Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976710668] at 72075186224037889 on unit CompleteOperation 2026-01-10T12:07:14.485337Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1341:3112], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:07:14.485408Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:14.485839Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [3:1341:3112], Recipient [3:1168:2963]: 
NKikimrTx.TEvStreamClearanceResponse TxId: 281474976710668 Cleared: true 2026-01-10T12:07:14.485889Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-10T12:07:14.485991Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 4500} 2026-01-10T12:07:14.486049Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:07:14.486083Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:07:14.486166Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1168:2963], Recipient [3:1168:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:07:14.486202Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:07:14.486260Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:07:14.486298Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:14.486339Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976710668] at 72075186224037890 for WaitForStreamClearance 2026-01-10T12:07:14.486375Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037890 on unit WaitForStreamClearance 2026-01-10T12:07:14.486425Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [4500:281474976710668] at 72075186224037890 2026-01-10T12:07:14.486467Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037890 is Executed 2026-01-10T12:07:14.486503Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-10T12:07:14.486534Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976710668] at 72075186224037890 to execution unit ReadTableScan 2026-01-10T12:07:14.486565Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:07:14.486819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037890 is Continue 2026-01-10T12:07:14.486851Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:14.486881Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-10T12:07:14.486910Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2026-01-10T12:07:14.486941Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:07:14.487902Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [3:1360:3129], Recipient [3:1168:2963]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:07:14.487946Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-10T12:07:14.488894Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710668, MessageQuota: 1 2026-01-10T12:07:14.489613Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976710668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:07:14.622679Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710668, PendingAcks: 0 2026-01-10T12:07:14.622789Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976710668, MessageQuota: 0 2026-01-10T12:07:14.633355Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:07:14.633424Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710668, at: 72075186224037890 2026-01-10T12:07:14.633631Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1168:2963], Recipient [3:1168:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:07:14.633677Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:07:14.633750Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:07:14.633790Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:07:14.633834Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976710668] at 72075186224037890 for ReadTableScan 2026-01-10T12:07:14.633877Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:07:14.633918Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [4500:281474976710668] at 72075186224037890 error: , IsFatalError: 0 2026-01-10T12:07:14.633964Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037890 is Executed 2026-01-10T12:07:14.634002Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037890 executing on unit ReadTableScan 2026-01-10T12:07:14.634038Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976710668] at 72075186224037890 to execution unit CompleteOperation 2026-01-10T12:07:14.634069Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037890 on unit CompleteOperation 2026-01-10T12:07:14.634267Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037890 is DelayComplete 2026-01-10T12:07:14.634304Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037890 executing on unit CompleteOperation 2026-01-10T12:07:14.634338Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976710668] at 
72075186224037890 to execution unit CompletedOperations 2026-01-10T12:07:14.634371Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976710668] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:07:14.634405Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976710668] at 72075186224037890 is Executed 2026-01-10T12:07:14.634447Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976710668] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:07:14.634478Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976710668] at 72075186224037890 has finished 2026-01-10T12:07:14.634514Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:07:14.634545Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-10T12:07:14.634579Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-10T12:07:14.634615Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:07:14.649980Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:07:14.650055Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:07:14.650092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976710668] at 72075186224037890 on unit CompleteOperation 2026-01-10T12:07:14.650156Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1341:3112], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:07:14.650207Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:00.944322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:00.944430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:00.944468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:00.944502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:00.944544Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:00.944601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:00.944659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:00.944734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:00.945560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:00.945858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:01.034638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:01.034701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.060292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:01.060403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:01.060597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:01.072971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:01.073263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:01.073994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:01.074229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:01.076766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:01.076936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:01.078208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:01.078278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:01.078349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:01.078415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:01.078477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:01.078765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:01.281436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.282527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.282705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.282786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.282878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.282951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:01.283722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... vProgressOperation 2026-01-10T12:07:16.127866Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:07:16.127912Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2026-01-10T12:07:16.127966Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:16.128023Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2026-01-10T12:07:16.128210Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:07:16.129613Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:962:2904] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-10T12:07:16.129946Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-10T12:07:16.130007Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2026-01-10T12:07:16.130662Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-10T12:07:16.130806Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-10T12:07:16.130884Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2026-01-10T12:07:16.132140Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:07:16.132186Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-10T12:07:16.132266Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-10T12:07:16.132692Z node 6 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269287424, Sender [6:140:2160], Recipient [6:277:2275] 2026-01-10T12:07:16.132749Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5268: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:07:16.132857Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:16.132998Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:16.133054Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-10T12:07:16.133219Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:07:16.133442Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:16.133531Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:07:16.133614Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:706: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2026-01-10T12:07:16.137887Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2026-01-10T12:07:16.140838Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:07:16.140915Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000041 first txId#104 countTxs#1 2026-01-10T12:07:16.140989Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000041 2026-01-10T12:07:16.141039Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-10T12:07:16.141253Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:127:2151], Recipient [6:127:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:07:16.141286Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:07:16.141390Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:16.141430Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:07:16.141653Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-10T12:07:16.141696Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:07:16.142000Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:07:16.142050Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:07:16.142152Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:16.142187Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:07:16.142220Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:07:16.142256Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:07:16.142286Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:07:16.142324Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:07:16.142365Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:07:16.142402Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:07:16.142457Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:07:16.142593Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:07:16.142630Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-10T12:07:16.142658Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-10T12:07:16.144322Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [6:211:2211], Recipient [6:127:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Version: 4 } 2026-01-10T12:07:16.144385Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-10T12:07:16.144508Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:07:16.144658Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 
2026-01-10T12:07:16.144719Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:07:16.144770Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:07:16.144827Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:07:16.144957Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:07:16.145008Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:16.149096Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:07:16.149811Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:07:16.149891Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:53.370707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:53.495168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:53.503170Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:53.503624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:53.503728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:53.880374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.880529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.886620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046749978542 != 1768046749978545 2026-01-10T12:05:53.898468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:53.949521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.114503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:54.535692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:54.552900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:54.698526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.806894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:896:2773]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:54.812641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:896:2773]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:54.813086Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:05:54.813409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:54.945046Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:896:2773]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:54.945188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:900:2775]: 
NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:54.946783Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:54.947133Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:05:54.947410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:54.963636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:54.972459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:54.972795Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:54.974766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:54.974871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:54.974994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:54.975396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:54.975578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:54.975685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:05:54.976319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:54.976445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:54.983031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:05:54.983127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:05:54.983176Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:05:54.983534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:54.983677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:54.983768Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:05:54.996817Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:55.033466Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:05:55.033757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:55.033911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:05:55.033961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:55.034000Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-10T12:05:55.034070Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:55.034446Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:896:2773], Recipient [1:896:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:55.034500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:55.034602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:55.034649Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:05:55.034747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:55.034813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:05:55.034842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:05:55.034870Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:05:55.034897Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:05:55.035154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:900:2775], Recipient [1:900:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:55.035211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:55.035468Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:05:55.035585Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:05:55.035855Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:55.035911Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:55.035962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:55.036018Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:55.036060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:55.036118Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:05:55.036183Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:05:55.036246Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:05:55.036312Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-10T12:05:55.036500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:901:2776], Recipient [1:896:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:55.036546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:55.036598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:05:55.036665Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:05:55.036708Z node 1 :TX_DAT ... k, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:07:14.969725Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-01-10T12:07:14.969980Z node 11 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2026-01-10T12:07:14.970283Z node 11 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-10T12:07:14.970392Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:07:14.970573Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [11:921:2784], Recipient [11:919:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-10T12:07:14.970612Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:07:14.970648Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-10T12:07:15.233380Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1164:2967], Recipient [11:919:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:07:15.233625Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:07:15.233738Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-10T12:07:15.233878Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:07:15.233955Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:07:15.234022Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add 
[0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:15.234079Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:07:15.234147Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-10T12:07:15.234212Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:07:15.234248Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:15.234278Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:07:15.234310Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:07:15.234503Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:07:15.234855Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:07:15.234947Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1164:2967], 0} after executionsCount# 1 2026-01-10T12:07:15.235021Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1164:2967], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:15.235142Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1164:2967], 0} finished in read 2026-01-10T12:07:15.235241Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:07:15.235276Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:07:15.235306Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:15.235340Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:15.235393Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:07:15.235423Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:15.235461Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-10T12:07:15.235520Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:07:15.235659Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: 
TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:07:15.236308Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1164:2967], Recipient [11:919:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:07:15.236380Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:07:15.236625Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1164:2967], Recipient [11:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2026-01-10T12:07:15.236740Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:07:15.236798Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-10T12:07:15.236881Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:15.236928Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:07:15.236959Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:15.236989Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:07:15.237034Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-10T12:07:15.237072Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:15.237100Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:15.237144Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:07:15.237170Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:07:15.237281Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-10T12:07:15.237508Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:07:15.237556Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[11:1164:2967], 1} after executionsCount# 1 2026-01-10T12:07:15.237597Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[11:1164:2967], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:15.237665Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[11:1164:2967], 1} finished in read 2026-01-10T12:07:15.237718Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:15.237747Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:07:15.237778Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:07:15.237810Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:07:15.237855Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:07:15.237879Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:07:15.237906Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-10T12:07:15.237938Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:07:15.238023Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:07:15.239576Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1164:2967], Recipient [11:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:07:15.239635Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig |84.8%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |84.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:52.714350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:52.849317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:52.858709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:52.859423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:52.859533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:53.334679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:53.334868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:53.342853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046748935300 != 1768046748935303 2026-01-10T12:05:53.367523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:53.422365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:53.521493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:53.869240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:53.884462Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:54.015694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:54.075482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:54.077049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:54.077375Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:05:54.077714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:54.156477Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:54.157450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:54.157605Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:54.159805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:54.159911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:54.159985Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:54.160455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:54.160620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:54.160754Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:05:54.172875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:54.257279Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:05:54.257585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:54.257745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:05:54.257806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:54.257882Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:05:54.257930Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:54.258205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:54.258255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:54.258630Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:05:54.258757Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:05:54.258860Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:54.258927Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:54.259005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:54.259060Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:54.259108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:54.259144Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:05:54.259205Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:05:54.259338Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:54.259392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:54.259442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:05:54.259549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:05:54.259595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:05:54.259717Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:05:54.259982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:05:54.260919Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:05:54.261099Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:05:54.261160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:05:54.261200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:05:54.261241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:05:54.261282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:05:54.261684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:05:54.261740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:05:54.261779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:05:54.261819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:54.261888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:05:54.261950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:05:54.261989Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:05:54.262029Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:05:54.262082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:05:54.264266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:05:54.264325Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:05:54.275887Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.459664Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1168:2976], serverId# [10:1169:2977], sessionId# [0:0:0] 2026-01-10T12:07:15.459794Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1167:2975], Recipient [10:943:2797]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-10T12:07:15.473911Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1172:2980], Recipient [10:943:2797]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.473986Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.474031Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1171:2979], serverId# [10:1172:2980], sessionId# [0:0:0] 2026-01-10T12:07:15.474267Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1170:2978], Recipient [10:943:2797]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-10T12:07:15.474443Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-10T12:07:15.474489Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:07:15.474528Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-10T12:07:15.474586Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2026-01-10T12:07:15.474681Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-10T12:07:15.474718Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2026-01-10T12:07:15.474752Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:15.474785Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-10T12:07:15.474836Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2026-01-10T12:07:15.474873Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-10T12:07:15.474899Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:15.474927Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2026-01-10T12:07:15.474952Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2026-01-10T12:07:15.475046Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-10T12:07:15.475233Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[10:1170:2978], 1002} after executionsCount# 1 2026-01-10T12:07:15.475285Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[10:1170:2978], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:15.475366Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[10:1170:2978], 1002} finished in read 2026-01-10T12:07:15.475425Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-10T12:07:15.475453Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2026-01-10T12:07:15.475481Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:07:15.475510Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:07:15.475567Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-10T12:07:15.475596Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:07:15.475622Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037890 has finished 2026-01-10T12:07:15.475657Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-10T12:07:15.475750Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-10T12:07:15.485492Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1175:2983], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.485582Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.485632Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 
72075186224037891, clientId# [10:1174:2982], serverId# [10:1175:2983], sessionId# [0:0:0] 2026-01-10T12:07:15.485819Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1173:2981], Recipient [10:936:2792]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-10T12:07:15.487634Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1178:2986], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.487690Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:15.487733Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1177:2985], serverId# [10:1178:2986], sessionId# [0:0:0] 2026-01-10T12:07:15.487978Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1176:2984], Recipient [10:936:2792]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-10T12:07:15.488186Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-10T12:07:15.488246Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:07:15.488286Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-10T12:07:15.488349Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2026-01-10T12:07:15.488448Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-10T12:07:15.488483Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2026-01-10T12:07:15.488517Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:15.488552Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-10T12:07:15.488607Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2026-01-10T12:07:15.488645Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-10T12:07:15.488674Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:15.488700Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2026-01-10T12:07:15.488731Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2026-01-10T12:07:15.488832Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } 
Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-10T12:07:15.489032Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[10:1176:2984], 1003} after executionsCount# 1 2026-01-10T12:07:15.489100Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[10:1176:2984], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:15.489179Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[10:1176:2984], 1003} finished in read 2026-01-10T12:07:15.489243Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-10T12:07:15.489273Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2026-01-10T12:07:15.489305Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2026-01-10T12:07:15.489373Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2026-01-10T12:07:15.489434Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-10T12:07:15.489462Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2026-01-10T12:07:15.489489Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037891 has finished 2026-01-10T12:07:15.489541Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-10T12:07:15.489643Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |84.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TMonitoringTests::ValidActorId |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TSchemeShardUserAttrsTest::VariousUse >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TSchemeShardUserAttrsTest::MkDir >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:04.387569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:04.387706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:04.387762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:04.387836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:04.387887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:04.387947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:04.388014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:04.401047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:04.402305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:04.402791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:04.550260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:04.550351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:04.565683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:04.571549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:04.571802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:04.580913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:04.581165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:04.581982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:04.582399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:04.586367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:04.586618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:04.587967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:04.588044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:04.588201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:04.588263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:04.588320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:04.588583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:04.845398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.846687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.846841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.846929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:04.847917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
712:2697], Recipient [6:127:2151]: NKikimrTxColumnShard.TEvNotifyTxCompletionResult Origin: 72075186233409546 TxId: 101 2026-01-10T12:07:22.170460Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5291: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2026-01-10T12:07:22.170530Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-10T12:07:22.170581Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:07:22.170717Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-10T12:07:22.170901Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:07:22.173323Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:07:22.173407Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:07:22.173459Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2026-01-10T12:07:22.173620Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:127:2151], Recipient [6:127:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:07:22.173659Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:07:22.173733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:07:22.173782Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:07:22.173925Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:22.173965Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:07:22.174005Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:07:22.174055Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:07:22.174096Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:07:22.174145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:07:22.174227Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:745:2721] message: TxId: 101 2026-01-10T12:07:22.174285Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready 
parts: 1/1 2026-01-10T12:07:22.174329Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:07:22.174371Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:07:22.174559Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:07:22.181285Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:07:22.181426Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [6:745:2721] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2026-01-10T12:07:22.181652Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:07:22.181707Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:746:2722] 2026-01-10T12:07:22.181952Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:748:2724], Recipient [6:127:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:07:22.181998Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:07:22.182043Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:07:22.182812Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [6:793:2761], Recipient [6:127:2151]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2026-01-10T12:07:22.182874Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:07:22.192197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:07:22.192622Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:361: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2026-01-10T12:07:22.192725Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2026-01-10T12:07:22.192980Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:07:22.204158Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 
hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:22.204502Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2026-01-10T12:07:22.204574Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:07:22.204963Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:07:22.205019Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:07:22.205409Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [6:799:2767], Recipient [6:127:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:22.205470Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:22.205517Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:07:22.205659Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [6:745:2721], Recipient [6:127:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-10T12:07:22.205692Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:07:22.205774Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:07:22.205880Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:07:22.205930Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:797:2765] 2026-01-10T12:07:22.206135Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:799:2767], Recipient [6:127:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:07:22.206170Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:07:22.206210Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-10T12:07:22.206591Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [6:800:2768], Recipient [6:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:07:22.206644Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:07:22.206747Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:22.206945Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 199us result status StatusPathDoesNotExist 2026-01-10T12:07:22.207105Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> ActorHandler::HttpOk >> TSchemeShardUserAttrsTest::MkDir [GOOD] |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |84.8%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> Other::TraceInvalidTokenForbidden ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:22.475474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:22.475592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.475634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:22.475673Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:22.475740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:22.475776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:22.475878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.475965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:22.478084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:22.478481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:22.598220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:22.598315Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:22.613850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:22.619933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:22.620244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:22.634047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:22.634302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:22.635147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:22.635645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:22.652731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:22.652957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:22.654323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:22.654380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:22.654493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:22.654550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-10T12:07:22.654699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:22.655004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:22.886392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.887523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.887691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.887795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.887871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.887973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:07:22.888189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:22.888837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:07:24.276970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:24.277013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:24.277187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:07:24.277285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:07:24.277427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:24.277459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-10T12:07:24.277502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 39 2026-01-10T12:07:24.277578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 40 FAKE_COORDINATOR: Erasing txId 112 2026-01-10T12:07:24.278363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:07:24.278459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:07:24.278491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:07:24.278536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-10T12:07:24.278588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:07:24.279063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 
72057594046678944, cookie: 112 2026-01-10T12:07:24.279204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:07:24.279238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:07:24.279268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-10T12:07:24.279299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:07:24.279927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:07:24.280004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:07:24.280048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:07:24.280138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-10T12:07:24.280171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:07:24.280252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-10T12:07:24.280770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:07:24.280825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:07:24.280907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:07:24.283223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:07:24.283680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:07:24.285473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 112 2026-01-10T12:07:24.285635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-10T12:07:24.286134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-10T12:07:24.286200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-10T12:07:24.286895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-10T12:07:24.287006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:07:24.287047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:903:2893] TestWaitNotification: OK eventTxId 112 2026-01-10T12:07:24.287964Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:24.288240Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 288us result status StatusSuccess 2026-01-10T12:07:24.288658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 
2026-01-10T12:07:24.292737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 38 PathVersion: 8 } ApplyIf { PathId: 39 PathVersion: 7 } ApplyIf { PathId: 40 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:07:24.292945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2026-01-10T12:07:24.293069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:07:24.295716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:24.296015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> MonPage::HttpOk |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |84.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |84.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRboPg::AliasesRenames [GOOD] >> KqpRboPg::Bench_10Joins |84.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:22.794490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:22.794633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.794681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:22.794730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:22.794784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:22.794818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:22.794890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.794997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:22.795990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:22.796352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:23.060816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:23.060896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:23.116109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:23.116247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:23.116495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:23.174744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:23.175148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:23.176102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:23.176406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:23.185677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:23.185922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:23.187352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:23.187447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:23.187545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:23.187601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:23.187718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:23.188042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:23.405445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.406671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.406891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.406989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407075Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.407944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.408032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... StateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:24.885509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:24.885794Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 281us result status StatusSuccess 2026-01-10T12:07:24.886277Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:24.887035Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:24.887295Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 208us result status StatusSuccess 2026-01-10T12:07:24.887707Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 
CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:24.888412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:24.888616Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 206us result status StatusSuccess 2026-01-10T12:07:24.889061Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:24.889812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:24.890011Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 213us result status StatusSuccess 2026-01-10T12:07:24.890432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest 
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:22.990639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:22.990748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.990795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:22.990831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:22.990891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:22.990936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:22.991014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:22.991118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:22.992046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:22.992406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:23.095095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:23.095161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:23.113112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:23.113237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:23.113471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:23.124048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:23.124364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:23.125259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:23.125509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:23.128332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:23.128539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:23.129779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:23.129844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:23.129914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:23.129988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:23.130110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:23.130448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:23.412584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.413927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.414951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.415053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:07:23.415146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.415242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.415340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Operation and all the parts is done, operation id: 105:0 2026-01-10T12:07:25.235980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:07:25.244320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:07:25.244490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-10T12:07:25.244539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:07:25.244594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-10T12:07:25.247441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:07:25.257394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:07:25.257816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:25.257863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:25.258086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:07:25.258277Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:25.258361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-10T12:07:25.258422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2026-01-10T12:07:25.259165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:07:25.259287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:07:25.259338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:07:25.259391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:07:25.259455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:07:25.260059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:07:25.268578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:07:25.268659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:07:25.268699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-10T12:07:25.268752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:07:25.268933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:07:25.269288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:07:25.269343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:07:25.269457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:07:25.278840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:07:25.279569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:07:25.279689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:07:25.280095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:07:25.280155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:07:25.280725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:07:25.280842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:07:25.280883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:810:2800] TestWaitNotification: OK eventTxId 105 2026-01-10T12:07:25.281664Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:25.281920Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 273us result status StatusPathDoesNotExist 2026-01-10T12:07:25.282135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:07:25.289080Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:25.289388Z node 
1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 356us result status StatusSuccess 2026-01-10T12:07:25.289973Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] >> ActorPage::NoValidGroupForbidden >> DataShardWrite::LocksBrokenStats [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] 
recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:07:23.085615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:07:23.085745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:23.085825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:07:23.085862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:07:23.085903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:07:23.085934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:07:23.086011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:07:23.086102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:07:23.087040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:07:23.087385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:07:23.218354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:07:23.218445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:23.254305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:07:23.254440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:07:23.254674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:07:23.312493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:07:23.312837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:07:23.313647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:23.313910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:07:23.320272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:07:23.320490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:07:23.321722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:23.321785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:23.321865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:23.321911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:07:23.322029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:23.322355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:07:23.634664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:07:23.643539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.643981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.644058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.648383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.648508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:07:23.648604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... alue: "ValA2" } } ApplyIf { PathId: 38 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:07:26.200526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:07:26.200739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-10T12:07:26.200813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-10T12:07:26.200984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:07:26.201081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:26.203836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:26.204139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2026-01-10T12:07:26.204404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:07:26.204460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:07:26.204525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-10T12:07:26.204676Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:07:26.213114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:07:26.213326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-10T12:07:26.213759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:07:26.213900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:07:26.213951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-10T12:07:26.214180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:07:26.214227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:07:26.214278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:07:26.214325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:07:26.214390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:07:26.214486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:07:26.214538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:07:26.214719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:07:26.214758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:07:26.214807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:07:26.214871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:07:26.214912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-10T12:07:26.214948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:07:26.222195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:07:26.222265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:07:26.222512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:07:26.222575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:07:26.223209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:07:26.223346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:07:26.223401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:07:26.223442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:07:26.223485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:07:26.223600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:07:26.237572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:07:26.237951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:07:26.238001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:07:26.238520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:07:26.238640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 
103: got EvNotifyTxCompletionResult 2026-01-10T12:07:26.238681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:760:2750] TestWaitNotification: OK eventTxId 103 2026-01-10T12:07:26.239275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:07:26.239516Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 260us result status StatusSuccess 2026-01-10T12:07:26.239930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> Other::TraceHttpOk >> KqpSystemView::PartitionStatsRanges [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> ActorHandler::OptionsNoContent >> Other::TraceNoValidGroupForbidden >> TTxDataShardRecomputeKMeansScan::EmptyCluster [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest >> ActorPage::InvalidTokenForbidden >> ActorHandler::NoValidGroupForbidden |84.9%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |84.9%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:55.196559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:55.393819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:55.410914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:55.411336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:55.411415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:56.004210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:56.004356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:56.013468Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046750974071 != 1768046750974074 2026-01-10T12:05:56.026221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:56.081352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:56.190767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:56.530762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:56.546756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:56.660111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:56.719656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:56.720919Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:56.721246Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:05:56.721507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:56.778621Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:56.779448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:56.779571Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:56.781480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:56.781585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:56.781658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:56.782051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:56.782201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:56.782293Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:05:56.797253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:56.837092Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:05:56.837319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:56.837436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:05:56.837482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:56.837532Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:05:56.837571Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:56.837815Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:56.837860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:56.838198Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:05:56.838287Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:05:56.838375Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:56.838414Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:56.838459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:56.838507Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:56.838554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:56.838589Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:05:56.838634Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:05:56.838730Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:56.838781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:56.838851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:05:56.838960Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:05:56.839006Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:05:56.839102Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:05:56.839322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:05:56.839397Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:05:56.839514Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:05:56.839564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:05:56.839606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:05:56.839641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:05:56.839694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:05:56.839993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:05:56.840030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:05:56.840064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:05:56.840120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:56.840174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:05:56.840227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:05:56.840271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:05:56.840304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:05:56.840333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:05:56.842219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:05:56.842270Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:05:56.853100Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 6715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:07:27.532952Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-10T12:07:27.533002Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:07:27.533034Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:07:27.533067Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:27.533101Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:07:27.533146Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/0 2026-01-10T12:07:27.533208Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-10T12:07:27.533244Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:07:27.533273Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:27.533303Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:07:27.533331Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:07:27.533364Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:07:27.533393Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:07:27.533418Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:07:27.533448Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:07:27.533483Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-10T12:07:27.533618Z node 11 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 
281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-10T12:07:27.533684Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-10T12:07:27.533755Z node 11 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:07:27.533856Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:07:27.533895Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:07:27.533958Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:07:27.534007Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:07:27.534052Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-10T12:07:27.534086Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:07:27.534118Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:27.534150Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:27.534202Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:07:27.534228Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:27.534258Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-10T12:07:27.534328Z node 11 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:07:27.534365Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:07:27.534435Z node 11 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:07:27.534519Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:07:27.537993Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [11:70:2117], Recipient [11:885:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 11 Status: STATUS_NOT_FOUND 2026-01-10T12:07:27.538579Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1108:2914], Recipient [11:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:27.538641Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:27.538717Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1107:2913], 
serverId# [11:1108:2914], sessionId# [0:0:0] 2026-01-10T12:07:27.538876Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [11:1106:2912], Recipient [11:885:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-10T12:07:27.540412Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1111:2917], Recipient [11:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:27.540505Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:07:27.540590Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1110:2916], serverId# [11:1111:2917], sessionId# [0:0:0] 2026-01-10T12:07:27.542996Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1109:2915], Recipient [11:885:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-10T12:07:27.543212Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:07:27.543301Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-10T12:07:27.543370Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-10T12:07:27.543482Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-10T12:07:27.543640Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:07:27.543718Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:07:27.543790Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:27.543850Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:07:27.543915Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:07:27.543978Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:07:27.544014Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:27.544045Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:07:27.544095Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:07:27.544237Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 
ResultFormat: FORMAT_CELLVEC } 2026-01-10T12:07:27.544576Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1109:2915], 1000} after executionsCount# 1 2026-01-10T12:07:27.544680Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1109:2915], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:27.544817Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1109:2915], 1000} finished in read 2026-01-10T12:07:27.544915Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:07:27.544952Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:07:27.544986Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:27.545020Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:27.545079Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:07:27.545108Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:27.545147Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:07:27.545208Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:07:27.545372Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 15798, MsgBus: 23982 2026-01-10T12:07:15.542789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703336644493239:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:15.542856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:16.338109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:16.338219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:16.350884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:16.424243Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703336644493001:2081] 
1768046835470883 != 1768046835470886 2026-01-10T12:07:16.457882Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:16.485483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:16.527409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:16.536042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:16.784545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:16.784577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:16.784592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:16.784708Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:17.705719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:17.752734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:17.851349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:18.195210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:18.554978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:18.720656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:20.550459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703336644493239:2262];send_to=[0:7307199536658146131:7762515]; 
2026-01-10T12:07:20.568740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:21.894504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703362414298656:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.894660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.896647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703362414298665:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.896722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.673945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.731877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.780373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.851420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.913814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.998079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:23.081453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:23.227343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:23.416791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703371004234132:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.416901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.417074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703371004234138:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.417111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.417118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703371004234137:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.423286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:23.452607Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703371004234141:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:07:23.553957Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703371004234200:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:27.681484Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768046847652, txId: 281474976710673] shutting down >> Other::UnknownPathNotFound >> BasicStatistics::TwoNodes |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus 2026-01-10 12:07:03,283 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:07:03,526 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 14503 59.0M 59.0M 33.0M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/003b72/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args 14587 575M 576M 571M └─ ydb-core-cms-ut_sentinel_unstable --trace-path-append /home/runner/.ya/build/build_root/d9xi/003b72/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/ytest.report.t Test command err: 2026-01-10T11:57:18.027443Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T11:57:18.027653Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T11:57:18.028053Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T11:57:18.028254Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T11:57:18.028474Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T11:57:18.029084Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T11:57:18.034722Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: 
DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T11:57:18.052549Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 
PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
ze: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395070Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395224Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395358Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395514Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395668Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 
1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697090110 2026-01-10T12:07:03.395725Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:07:03.419154Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:07:03.419228Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:07:03.419358Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-10T12:07:03.419412Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-10T12:07:03.419455Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-10T12:07:03.419495Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-10T12:07:03.419534Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-10T12:07:03.419572Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-10T12:07:03.419609Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-10T12:07:03.419644Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-10T12:07:03.420257Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 2026-01-10T12:07:03.420877Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: 
"/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 2026-01-10T12:07:03.421096Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 2026-01-10T12:07:03.421270Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 2026-01-10T12:07:03.421434Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 2026-01-10T12:07:03.421592Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 697100110 Traceback (most recent 
call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003b72/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003b72/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel_unstable/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> Other::TraceInvalidTokenForbidden [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] Test command err: 2026-01-10T12:07:17.269411Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703344133790511:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:17.269476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:17.371367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:17.935943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:17.965727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:07:17.965852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:18.125391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:18.171332Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703344133790299:2081] 1768046837201417 != 1768046837201420 2026-01-10T12:07:18.195346Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:18.213215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:18.221692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:18.908732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:18.908755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:18.908762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:18.908864Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:19.519015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:19.530311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:19.541713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:19.735082Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-10T12:07:22.267928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703344133790511:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:22.267988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:24.344250Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:07:24.344278Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-10T12:07:24.375744Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:07:24.436710Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:07:24.436739Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-10T12:07:26.686794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:26.690027Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:26.936258Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:26.958786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:26.958870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:26.972249Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703381636667201:2081] 1768046846568255 != 1768046846568258 2026-01-10T12:07:26.998938Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:27.061348Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:27.358390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:27.657864Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:27.860581Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:27.860603Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:27.860609Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:27.860700Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:28.272707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:28.284512Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:28.294490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:28.378765Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-10T12:07:32.027822Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7593703407406472059:2335] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:07:32.033625Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:07:32.033654Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-10T12:07:32.049692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:07:32.063752Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:07:32.063772Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> ActorHandler::HttpOk [GOOD] >> ActorHandler::InvalidTokenForbidden >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest 
>> Other::TraceInvalidTokenForbidden [GOOD] Test command err: 2026-01-10T12:07:26.237452Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703382597700063:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:26.238270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:27.110321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:27.110495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:27.184697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:27.271658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:27.357685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:27.368841Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:27.408268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703382597700030:2081] 1768046846224705 != 1768046846224708 2026-01-10T12:07:27.452205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:27.628233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:27.628258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:27.628266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:27.628355Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:27.911455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:27.918855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:28.003883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:28.017663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:28.023676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> ReadSessionImplTest::CommonHandler [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:20.467103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:20.746680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:20.777483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:20.778048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:20.778162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:21.297834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:21.298012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:21.305406Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046835939466 != 1768046835939469 2026-01-10T12:07:21.319455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:21.373817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:21.495731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:21.898460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:21.913089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:22.049937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.412518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.412672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.412792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.413410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.413599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.419028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:22.572176Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:07:22.641549Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> MonPage::HttpOk [GOOD] >> MonPage::OptionsNoContent >> Other::TraceHttpOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:21.246806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:21.392534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:21.406357Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:21.407022Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:21.407205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:21.852672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:21.852875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:21.861679Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046835831898 != 1768046835831901 2026-01-10T12:07:21.876401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:21.933886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:22.070712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:22.482775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:22.502985Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:22.658336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:23.169180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:07:23.560806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.561003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.561156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.563765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.563882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:23.574872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:23.764569Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1028:2868], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:07:23.825660Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1086:2907] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> Other::TraceNoValidGroupForbidden [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> ActorHandler::OptionsNoContent [GOOD] >> ActorPage::HttpOk >> ActorPage::NoValidGroupForbidden [GOOD] >> ActorPage::OptionsNoContent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:19.892479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:20.257372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:20.282831Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:20.283343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:20.283467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:20.900265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:20.900476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:20.907607Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046834945618 != 1768046834945621 2026-01-10T12:07:20.921089Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:20.986932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:21.125832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:21.529413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:21.543525Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:21.680996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:22.222932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:958:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.223080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.223181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.223981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:972:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.224096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:22.229716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:22.406262Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:973:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:07:22.504178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1030:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-10T12:06:27.615159Z :SettingsValidation INFO: Random seed for debugging is 1768046787615109 2026-01-10T12:06:28.345721Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703132920102182:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:28.345771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:28.446702Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703133859136859:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:28.446750Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:28.495544Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:28.495926Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:28.511023Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:29.181282Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:29.248603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:29.361617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:29.361726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:29.369168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:29.369257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:29.388799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:29.389608Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 
Cookie 2 2026-01-10T12:06:29.452779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:29.634093Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:29.645598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:29.731756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:29.740186Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:29.754750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:30.000378Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.024227s 2026-01-10T12:06:30.085437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002e80/r3tmp/yandexLhYhoc.tmp 2026-01-10T12:06:30.085463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002e80/r3tmp/yandexLhYhoc.tmp 2026-01-10T12:06:30.085669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002e80/r3tmp/yandexLhYhoc.tmp 2026-01-10T12:06:30.085787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:30.201148Z INFO: TTestServer started on Port 20344 GrpcPort 3497 PQClient connected to localhost:3497 2026-01-10T12:06:30.811704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:30.840337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:33.352205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703132920102182:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:33.352306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:33.451489Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703133859136859:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:33.451572Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:35.381850Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163923908084:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.381976Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163923908095:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.382048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.383983Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163923908100:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.384094Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.397864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:35.518863Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703163923908099:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:06:36.043285Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703163923908131:2143] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:36.123471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:36.141149Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703162984874565:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:36.144456Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmJhNDgyMzUtZTJkODM5YzItYWZiZWQwZjAtZTlhZTYyNQ==, ActorId: [1:7593703162984874516:2334], ActorState: ExecuteState, LegacyTraceId: 01kekwt6t04xh1wtew3tm3e9nr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:06:36.146902Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:06:36.143568Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703168218875436:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:36.148020Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjNmYzNmNzMtYTc5NTFiOGItMzIyO ... tx events 2026-01-10T12:07:32.678168Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.678184Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:07:32.678213Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.678225Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:07:32.778704Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:07:32.778737Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.778752Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:07:32.778773Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.778787Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:07:32.879054Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:07:32.879086Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.879100Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:07:32.879118Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.879128Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:07:32.979507Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:07:32.979537Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.979553Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:07:32.979573Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:32.979583Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:07:33.080463Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:07:33.080497Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:33.080512Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:07:33.080530Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:07:33.080543Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:07:35.041661Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.041709Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.041743Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:07:35.042880Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:07:35.056454Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:07:35.056723Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.057324Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-10T12:07:35.093686Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.093728Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.093763Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:07:35.094395Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:07:35.095028Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:07:35.095223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.099843Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:07:35.101240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:07:35.101947Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-10T12:07:35.102057Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-10T12:07:35.102261Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:07:35.102313Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:07:35.102344Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:07:35.102393Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-10T12:07:35.104678Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.104716Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.104754Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:07:35.105112Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:07:35.110679Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:07:35.110917Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.112361Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:07:35.113289Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.113519Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:07:35.120230Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:07:35.120332Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:07:35.120458Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2026-01-10T12:07:35.125884Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.125929Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.125974Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:07:35.126573Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:07:35.127492Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:07:35.135754Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:35.136136Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-01-10T12:07:35.137267Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:07:35.137812Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:07:35.139411Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-10T12:07:35.139501Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:07:35.139576Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:07:35.139631Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-10T12:07:35.142870Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:07:35.142931Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:07:37.157471Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:37.157512Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:37.157550Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:07:37.244401Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:07:37.312444Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:07:37.318800Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:37.325565Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:07:37.335665Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:07:37.335791Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:07:37.335921Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless |84.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:52.249224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:52.400883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:52.419935Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:52.424725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:52.424897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:52.916343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:52.916495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:52.923119Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046749122351 != 1768046749122354 2026-01-10T12:05:52.941271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:52.989519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:53.107320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:53.475387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:05:53.497499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:53.624822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:05:53.685648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:05:53.686780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:05:53.687125Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:05:53.687414Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:05:53.797759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:05:53.798685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:05:53.798827Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:05:53.803678Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:05:53.803788Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:05:53.803858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:05:53.804320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:05:53.804497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:05:53.804636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:05:53.816591Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:05:53.870891Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:05:53.871109Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:05:53.871224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:05:53.871265Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:05:53.871299Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:05:53.871336Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:05:53.871563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:53.871629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:05:53.871959Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:05:53.872071Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:05:53.872498Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:05:53.872600Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:05:53.872650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:05:53.872686Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:05:53.872723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:05:53.872759Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:05:53.872817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:05:53.872914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:53.872964Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:05:53.873032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:05:53.873137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:05:53.873188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:05:53.873323Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:05:53.873612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:05:53.873676Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:05:53.873790Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:05:53.873853Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:05:53.873902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:05:53.873940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:05:53.874000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:05:53.874324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:05:53.874366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:05:53.874404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:05:53.874454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:05:53.874515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:05:53.874549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:05:53.874583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:05:53.874617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:05:53.874645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:05:53.876718Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:05:53.876774Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:05:53.888460Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... at 72075186224037889 2026-01-10T12:07:37.170124Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:921:2784], Recipient [12:918:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-10T12:07:37.170164Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:07:37.170227Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-10T12:07:37.170491Z node 12 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:07:37.170562Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:918:2782], Recipient [12:921:2784]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2026-01-10T12:07:37.170592Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:07:37.170620Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-01-10T12:07:37.492598Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1165:2968], Recipient [12:918:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:07:37.492887Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:07:37.493034Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-10T12:07:37.493214Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:07:37.493285Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:07:37.493342Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:37.493397Z node 12 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:07:37.493459Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-10T12:07:37.493513Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:07:37.493544Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:37.493569Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:07:37.493598Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:07:37.493750Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:07:37.494091Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:07:37.494169Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[12:1165:2968], 0} after executionsCount# 1 2026-01-10T12:07:37.494260Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[12:1165:2968], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:37.494370Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[12:1165:2968], 0} finished in read 2026-01-10T12:07:37.494485Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:07:37.494514Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:07:37.494541Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:07:37.494581Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:07:37.494642Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:07:37.494669Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:07:37.494701Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-10T12:07:37.494763Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:07:37.494903Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:07:37.495585Z node 12 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1165:2968], Recipient [12:918:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:07:37.495659Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:07:37.495800Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1165:2968], Recipient [12:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-10T12:07:37.495902Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:07:37.495972Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-10T12:07:37.496037Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:37.496064Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:07:37.497744Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:07:37.497783Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:07:37.497841Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-10T12:07:37.497877Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:37.497907Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:07:37.497931Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:07:37.497976Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:07:37.498089Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-10T12:07:37.498332Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:07:37.498377Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[12:1165:2968], 1} after executionsCount# 1 2026-01-10T12:07:37.498431Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[12:1165:2968], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:07:37.498503Z node 12 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[12:1165:2968], 1} finished in read 2026-01-10T12:07:37.498560Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:37.498607Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:07:37.498641Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:07:37.498672Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:07:37.498736Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-10T12:07:37.498763Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:07:37.498792Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-10T12:07:37.498828Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:07:37.498919Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:07:37.500516Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1165:2968], Recipient [12:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:07:37.500574Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> Other::UnknownPathNotFound [GOOD] >> ActorPage::InvalidTokenForbidden [GOOD] >> ActorPage::NoUseAuthOk |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TestKinesisHttpProxy::TestPing >> TestYmqHttpProxy::TestGetQueueUrl >> TestYmqHttpProxy::TestCreateQueue >> ActorHandler::NoValidGroupForbidden [GOOD] |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TestKinesisHttpProxy::MissingAction >> THealthCheckTest::TestStateStorageOk [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TestYmqHttpProxy::TestSendMessage >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestKinesisHttpProxy::DifferentContentTypes >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpRboYql::LeftJoins [GOOD] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [FAIL] >> ActorHandler::InvalidTokenForbidden [GOOD] >> THealthCheckTest::TestStateStorageYellow [GOOD] >> KqpPg::DropTablePg [GOOD] >> 
MonPage::OptionsNoContent [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> ActorPage::HttpOk [GOOD] >> KqpPg::DropTablePgMultiple >> ActorPage::OptionsNoContent [GOOD] >> ActorPage::NoUseAuthOk [GOOD] >> KqpRboYql::TPCH_YQL [GOOD] >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> KqpRboPg::Bench_10Joins [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [FAIL] >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [FAIL] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> KqpPg::CopyTableSerialColumns+useSink >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::TableSelect+useSink >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> KqpPg::V1CreateTable >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] >> BasicUsage::CreateTopicWithCustomMetricsLevel >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] >> THealthCheckTest::TestStateStorageBlue >> ActorHandler::NoUseAuthOk >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> KqpPg::DropTableIfExists >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestYmqHttpProxy::TestReceiveMessage >> KqpPg::CopyTableSerialColumns-useSink >> TestKinesisHttpProxy::GoodRequestPutRecords |84.9%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |84.9%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |84.9%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceHttpOk [GOOD] Test command err: 2026-01-10T12:07:28.890780Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703390464753171:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:28.890829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:29.000368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:29.732427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:29.739947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:29.740121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:29.910125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:29.970729Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:29.977049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:29.982114Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703390464753144:2081] 1768046848883082 != 1768046848883085 2026-01-10T12:07:30.060157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:30.408190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:30.408215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:30.408222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:30.408295Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:30.991107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:31.126365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 
2026-01-10T12:07:31.141209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:31.152303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> KqpPg::DropTableIfExists [GOOD] >> ActorHandler::NoUseAuthOk [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestKinesisHttpProxy::TestRequestBadJson >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [FAIL] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TTxDataShardReshuffleKMeansScan::MainToBuild >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> KqpPg::CreateIndex >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestYmqHttpProxy::TestSendMessageWithAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] Test command err: 2026-01-10T12:07:25.342895Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703378126371344:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:25.342996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:25.359459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:26.308200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:26.440484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:26.440601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:26.596811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:26.663038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:26.663121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:26.688257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703378126371114:2081] 1768046845243352 != 1768046845243355 2026-01-10T12:07:26.725990Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:26.903661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:27.116726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:27.116747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:27.116756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:27.116824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:27.743627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:27.770477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:27.933378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:27.942224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:27.986794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:07:37.365948Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703430267090758:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:37.366009Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:37.443377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:37.619680Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703430267090734:2081] 1768046857363285 != 1768046857363288 2026-01-10T12:07:37.655440Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:37.667695Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:37.671420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:37.671504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:37.677723Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:37.872998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:37.873027Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:37.873040Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:37.873158Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:37.917454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.401843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:38.404256Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:38.420566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:38.502080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:38.517190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:38.534707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::NoUseAuthOk [GOOD] Test command err: 2026-01-10T12:07:30.420966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703400084110978:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:30.421016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:31.382990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:31.383088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:31.533425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:31.540555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:31.544754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:31.544926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703400084110954:2081] 1768046850402026 != 1768046850402029 2026-01-10T12:07:31.545080Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:31.873041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:31.873067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:31.873074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:31.873184Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:31.900228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:32.531888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:32.549587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:32.697700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:32.713559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:40.952042Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703443017919494:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:40.952159Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:41.057712Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:41.227283Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:41.232051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:41.237985Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:41.238822Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:41.243505Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703443017919342:2081] 1768046860919020 != 1768046860919023 2026-01-10T12:07:41.275923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:41.518843Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:41.518872Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:41.518883Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:41.518974Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:41.822468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:41.957719Z node 
2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:42.008047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:42.020220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:42.142771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:42.160723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:42.167261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command err: 2026-01-10T12:07:28.601656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703391479715800:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:28.602169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:29.288251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:29.405170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:29.405284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:29.598944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:29.602513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:29.608275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:29.612550Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:29.928927Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:29.928948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:29.928955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:29.929035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:30.554714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:30.718895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:30.747893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:30.766476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T12:07:40.193469Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:40.201035Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:40.202105Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:40.203007Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703440929060667:2081] 1768046859870027 != 1768046859870030 2026-01-10T12:07:40.261000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:40.261101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:40.290270Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:40.457223Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:40.528904Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:40.528931Z node 2 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:40.528942Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:40.529041Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:40.944254Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:41.126215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:41.209276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:41.218111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:41.236446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::HttpOk [GOOD] Test command err: 2026-01-10T12:07:30.273136Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703399719906438:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:30.273341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:30.323552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:30.976405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:31.042473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:31.042585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:31.194713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:31.246480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703399719906203:2081] 1768046850208077 != 1768046850208080 2026-01-10T12:07:31.257074Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:31.272261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:31.280147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:31.697633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:31.697653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:31.697663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:31.697763Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:32.267862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:32.395932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:32.402144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:39.944347Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703440576275550:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:39.945515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:40.000560Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:40.103028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:40.276328Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:40.336597Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:40.340379Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [2:7593703440576275514:2081] 1768046859920785 != 1768046859920788 2026-01-10T12:07:40.391024Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:40.391119Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:40.446054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:40.710442Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:40.710465Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:40.710476Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:40.710556Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:40.862567Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:40.961868Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:41.238649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:41.359236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:41.387541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:41.404537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> KqpPg::CreateIndex [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream >> KqpPg::EquiJoin+useSink >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TestKinesisHttpProxy::ListShards >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> KqpPg::CreateNotNullPgColumn >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> TMLPConsumerTests::RecreateConsumer 2026-01-10 12:07:47,777 WARNING 
devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:07:47,918 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 15798 58.7M 58.8M 32.7M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/00371f/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args 16011 1.0G 1.0G 1020M └─ ydb-core-persqueue-pqtablet-partition-mlp-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/00371f/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/u Test command err: 2026-01-10T11:58:07.496045Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593700983257848480:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:07.545244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:08.276904Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T11:58:08.351412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T11:58:08.685364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:08.966213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:08.966316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T11:58:09.021007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:09.021128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:09.073937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593700983257848454:2081] 1768046287461188 != 1768046287461191 2026-01-10T11:58:09.087194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:09.102651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:09.223466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:09.295532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00371f/r3tmp/yandexqprgcM.tmp 2026-01-10T11:58:09.295556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00371f/r3tmp/yandexqprgcM.tmp 2026-01-10T11:58:09.295794Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00371f/r3tmp/yandexqprgcM.tmp 2026-01-10T11:58:09.295869Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T11:58:09.327962Z INFO: TTestServer started on Port 13508 GrpcPort 63462 PQClient connected to localhost:63462 2026-01-10T11:58:10.043613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:10.097434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:12.155076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701004732685977:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:12.155114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701004732685990:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:12.155184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:12.157639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593701004732685993:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:12.157746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T11:58:12.159457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T11:58:12.170328Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593701004732685992:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T11:58:12.495473Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593701004732686058:2649] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T11:58:12.496191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593700983257848480:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:12.496236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:12.522721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:12.560662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:12.773951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T11:58:12.828000Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593701004732686066:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T11:58:12.828771Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODgxNGMxNjUtOGI3M2Y5ZjUtNDAwMzVkMTktN2YxNzY3YTI=, ActorId: [1:7593701004732685975:2331], ActorState: ExecuteState, LegacyTraceId: 01kekwav5g19nztrf7cd04m0n6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T11:58:12.858475Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7593701009027653653:2831] === CheckClustersList. Ok 2026-01-10T11:58:19.854682Z :TODO INFO: TTopicSdkTestSetup started 2026-01-10T11:58:20.158036Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T11:58:20.260470Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593701039092424941:2939] connected; active server actors: 1 2026-01-10T11:58:20.261308Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T11:58:20.270152Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T11:58:20.270243Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T11:58:20.271956Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T11:58:20.277229Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T11:58:20.278042Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026 ... 
0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:30.160580Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:30.160620Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:30.160655Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:30.160686Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:31.168358Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:31.168393Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:31.168417Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:31.168442Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:32.168578Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:32.168608Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:32.168628Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:32.168649Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:33.171708Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:33.171740Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:33.171762Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:33.171786Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:34.172396Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:34.172426Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:34.172446Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:34.172468Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:35.177255Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:35.177303Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:35.177328Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:35.177353Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is 
empty 2026-01-10T12:07:36.176746Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:36.176781Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:36.176803Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:36.176837Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:37.177278Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:37.177318Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:37.177342Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:37.177368Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:38.184556Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:38.184593Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:38.184613Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:38.184634Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:39.184708Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:39.184744Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:39.184765Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:39.184787Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:40.188899Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:40.188935Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:40.188958Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:40.188979Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:41.188751Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:41.188796Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:41.188819Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:41.188842Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 
2026-01-10T12:07:42.192526Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:42.192558Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:42.192579Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:42.192599Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:43.192882Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:43.192914Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:43.192938Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:43.192961Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:44.195457Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:44.195487Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:44.195506Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:44.195528Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:45.200781Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:45.200822Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:45.200845Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:45.200871Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:46.202303Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:46.202337Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:46.202362Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:46.202385Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-10T12:07:47.204207Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 50 vs 50 2026-01-10T12:07:47.204243Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-10T12:07:47.204266Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-10T12:07:47.204291Z node 3 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty Traceback (most recent call 
last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/00371f/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/00371f/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::LeftJoins [GOOD] Test command err: Trying to start YDB, gRPC: 22967, MsgBus: 23585 2026-01-10T12:06:27.131117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703130341901857:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:27.131186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:27.182519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:27.863186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:27.863307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:27.883991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.068441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.163642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:28.312516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.312665Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703130341901731:2081] 1768046787066980 != 1768046787066983 2026-01-10T12:06:28.439219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.444707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:28.444742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:28.444753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:28.444833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:29.467608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:29.483548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:32.133043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703130341901857:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.133114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:32.649016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703151816739120:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.649137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.649585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703151816739130:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:32.649643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.181707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:06:34.430358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:34.438138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:34.438413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:34.438700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:34.438803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:34.438888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:34.438998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:34.439093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:34.439179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:34.439276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:34.439387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:34.439479Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:06:34.439582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:06:34.439677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7593703156111706970:2353];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:06:34.440632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:34.440858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:34.440952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:34.441033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:34.441112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:34.441214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:34.441303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:34.441381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:34.441478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:34.441559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037919;self_id=[1:7593703156111706962:2345];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_r ... s_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.553010Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.553042Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.566242Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.566332Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.566364Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.568748Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.568826Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.568853Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579500Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579577Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038132;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579586Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579617Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038113;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579632Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038132;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.579656Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038132;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.591440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.591965Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.592002Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.592824Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038142;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.592872Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038142;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.592893Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038142;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.605962Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038121;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.606050Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038121;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.606081Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038121;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.611368Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038124;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.611467Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038124;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.611499Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038124;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.618844Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.618939Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.618970Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.623828Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038126;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.623910Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038126;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.623938Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038126;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-10T12:07:35.665771Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwvb88a7s5r4tbeevdhbjm", SessionId: ydb://session/3?node_id=3&id=YTQ5ZjNjZjktYmUzMDMyOWMtY2I0N2M4ZGMtM2ZlZGNhM2Y=, Slow query, duration: 22.758080s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/t1` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t2` (\n a Int64\tNOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t3` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t4` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n ", parameters: 0b 2026-01-10T12:07:35.904678Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703423177394202:3722], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:35.904850Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:35.905058Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703423177394207:3725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:35.905221Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593703423177394209:3726], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:35.905277Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:35.913347Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:35.957040Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593703423177394211:3727], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:07:36.040328Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593703427472361564:8023] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes >> TestKinesisHttpProxy::ListShards [GOOD] >> KqpPg::CreateNotNullPgColumn [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords >> TestKinesisHttpProxy::TestListStreamConsumers >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream >> KqpPg::CreateSequence >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestYmqHttpProxy::BillingRecordsForJsonApi >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::NoUseAuthOk [GOOD] Test command err: 2026-01-10T12:07:30.543989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703401669692196:2225];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:30.544389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:30.593718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:31.059453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:31.156841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:31.156951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:31.179908Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:31.181823Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703401669692008:2081] 1768046850527231 != 1768046850527234 2026-01-10T12:07:31.216918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:31.350121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:31.350159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:31.350167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:31.350292Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:31.353250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:31.566387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:31.666616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:31.680136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:31.776722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:31.793297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:31.798596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T12:07:41.699197Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703448954004268:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:41.699416Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:41.730045Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:41.843983Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:41.844113Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:41.866808Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703448954004100:2081] 1768046861682861 != 1768046861682864 2026-01-10T12:07:41.875600Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:41.900241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:41.902600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:42.131743Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:42.131770Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:42.131781Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:42.131891Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:42.362433Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:42.649323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:42.663704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:42.705621Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:42.754001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:42.778700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:42.790830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::UnknownPathNotFound [GOOD] Test command err: 2026-01-10T12:07:31.279777Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703405322760540:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:31.279819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:31.867784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:31.921595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:31.921696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:31.930294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:31.977955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:32.165126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:32.272756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:32.292836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:32.292856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:32.292862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:32.292956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:32.676204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:32.683790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:32.760760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:32.767575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:32.783570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceNoValidGroupForbidden [GOOD] Test command err: 2026-01-10T12:07:30.249085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703399019676390:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:30.249186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:30.980784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:30.980924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:31.000856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:31.202701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:31.231986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:31.240571Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703399019676219:2081] 1768046850195236 != 1768046850195239 2026-01-10T12:07:31.244335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:31.468833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:31.468854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:31.468862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:31.476244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:31.500179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:31.779533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:31.797075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:31.909209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:31.913975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::TPCH_YQL [GOOD] Test command err: Trying to start YDB, gRPC: 11843, MsgBus: 64905 2026-01-10T12:06:07.241098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703042369013190:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:07.241147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:07.678110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:07.807504Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703042369013010:2081] 1768046767207288 != 1768046767207291 2026-01-10T12:06:07.815819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:07.822311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:07.822422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:07.829253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:07.977978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:08.043445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:08.043464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:08.046219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:08.046357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:08.264509Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:08.785592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:08.797886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:06:11.562154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703059548883093:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.562264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.562448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703059548883101:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.563294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703059548883107:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.563369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:11.566741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:11.583484Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703059548883108:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:06:11.719723Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703059548883160:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:12.244197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703042369013190:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:12.244288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16998, MsgBus: 29721 2026-01-10T12:06:13.297386Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703071553932925:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:13.297525Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:13.402126Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:13.505538Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:13.508274Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703071553932766:2081] 1768046773287162 != 1768046773287165 2026-01-10T12:06:13.532650Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:13.532731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:13.552867Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:13.586792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:13.772587Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:13.772612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:13.772619Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:13.772697Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:14.278913Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:14.421977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:17.417617Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703088733802836:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:17.417744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:17.418207Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703088733802846:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:17.418279Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:17.592249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:06:17.996816Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703088733803083:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:17.997129Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703088733803083:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:17.997415Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703088733803083:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:17.997540Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703088733803083:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:17.997926Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[2:7593703088733803083:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:17.998084Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_ ... 
s_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.452703Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.477374Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.477470Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.477493Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.491705Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.491799Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.491824Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.503706Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.503798Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.503823Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.515580Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.515664Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.515686Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.526895Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.528535Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.528578Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.540568Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.540645Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.540696Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.545038Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.545126Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.545154Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-10T12:07:31.567758Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwvh5b0xx51w6011qnw5r7", SessionId: ydb://session/3?node_id=4&id=MWUxM2JjODktYWVlNTE5MjUtNzk1NTA5ZjAtMTU5MzY5NzQ=, Slow query, duration: 12.609519s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE `/Root/customer` (\n c_acctbal Double,\n c_address String,\n c_comment String,\n c_custkey Int32 NOT NULL, -- Identifier\n c_mktsegment String ,\n c_name String ,\n c_nationkey Int32 , -- FK to N_NATIONKEY\n c_phone String ,\n PRIMARY KEY (c_custkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/lineitem` (\n l_comment String ,\n l_commitdate Date ,\n l_discount Double , -- it should be Decimal(12, 2)\n l_extendedprice Double , -- it should be Decimal(12, 2)\n l_linenumber Int32 NOT NULL,\n l_linestatus String ,\n l_orderkey Int32 NOT NULL, -- FK to O_ORDERKEY\n l_partkey Int32 , -- FK to P_PARTKEY, first part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with L_SUPPKEY\n l_quantity Double , -- it should be Decimal(12, 2)\n l_receiptdate Date ,\n l_returnflag String ,\n l_shipdate Date ,\n l_shipinstruct String ,\n l_shipmode String ,\n l_suppkey Int32 , -- FK to S_SUPPKEY, second part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with 
L_PARTKEY\n l_tax Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (l_orderkey, l_linenumber)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/nation` (\n n_comment String ,\n n_name String ,\n n_nationkey Int32 NOT NULL, -- Identifier\n n_regionkey Int32 , -- FK to R_REGIONKEY\n PRIMARY KEY(n_nationkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/orders` (\n o_clerk String ,\n o_comment String ,\n o_custkey Int32 , -- FK to C_CUSTKEY\n o_orderdate Date ,\n o_orderkey Int32 NOT NULL, -- Identifier\n o_orderpriority String ,\n o_orderstatus String ,\n o_shippriority Int32 ,\n o_totalprice Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (o_orderkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/part` (\n p_brand String ,\n p_comment String ,\n p_container String ,\n p_mfgr String ,\n p_name String ,\n p_partkey Int32 NOT NULL, -- Identifier\n p_retailprice Double , -- it should be Decimal(12, 2)\n p_size Int32 ,\n p_type String ,\n PRIMARY KEY(p_partkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/partsupp` (\n ps_availqty Int32 ,\n ps_comment String ,\n ps_partkey Int32 NOT NULL, -- FK to P_PARTKEY\n ps_suppkey Int32 NOT NULL, -- FK to S_SUPPKEY\n ps_supplycost Double , -- it should be Decimal(12, 2)\n PRIMARY KEY(ps_partkey, ps_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/region` (\n r_comment String ,\n r_name String ,\n r_regionkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(r_regionkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/supplier` (\n s_acctbal Double , -- it should be Decimal(12, 2)\n s_address String ,\n s_comment String ,\n s_name String ,\n s_nationkey Int32 , -- FK to N_NATIONKEY\n s_phone String ,\n s_suppkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(s_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n", parameters: 0b 2026-01-10T12:07:31.623945Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703406621779712:3034], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:31.624073Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:31.627567Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703406621779717:3037], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:31.627660Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593703406621779718:3038], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:31.627777Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:31.632037Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:31.658338Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593703406621779721:3039], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2026-01-10T12:07:31.732912Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593703406621779772:5415] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestYmqHttpProxy::TestDeleteQueue |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 18778, MsgBus: 28085 2026-01-10T12:04:03.906599Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702512295987403:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:03.906660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:04:04.302517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:04:04.302631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:04:04.308412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:04:04.380683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:04:04.438059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593702512295987255:2081] 1768046643828793 != 1768046643828796 2026-01-10T12:04:04.444421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:04:04.544116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:04:04.544144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:04:04.544150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:04:04.544234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:04:04.643286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:04:04.913469Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:04:05.384502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:04:05.397578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:04:05.537161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:05.924946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:06.283722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:06.390646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.536405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702533770825610:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.536564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.540479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702533770825620:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.540570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:08.890902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.905613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593702512295987403:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:04:08.905909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:04:08.928245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:08.986053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.032132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.069588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.118481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.161377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.256768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:04:09.399263Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538065793793:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.399337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.400304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538065793799:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.400376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593702538065793798:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.400418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:04:09.404266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:04:09.416552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593702538065793802:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:04:09.523285Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593702538065793855:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280;columns=3; Trying to start YDB, gRPC: 2281, MsgBus: 18818 2026-01-10T12: ... ard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:20.718085Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:21.087736Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703363005010033:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.088060Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.088876Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703363005010039:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.088893Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593703363005010038:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.089008Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:21.103419Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:21.159367Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593703363005010042:2506], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:07:21.247543Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593703363005010097:3810] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:23.023616Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:07:23.023659Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 20445, MsgBus: 63546 2026-01-10T12:07:29.829168Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7593703396987748685:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:29.857191Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:29.883567Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:30.071970Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:30.076884Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7593703396987748515:2081] 1768046849789979 != 1768046849789982 2026-01-10T12:07:30.100373Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:30.117441Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:30.117604Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:30.170630Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:30.549160Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:30.549190Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:30.549204Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:30.549332Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:30.600594Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:30.828161Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:31.921073Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:31.971289Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:34.816250Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7593703396987748685:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:34.816372Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:41.146032Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703448527357455:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:41.146246Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:41.147075Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703448527357467:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:41.147165Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7593703448527357468:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:41.147776Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:41.156817Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:41.178924Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7593703448527357471:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:07:41.266260Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7593703448527357522:2877] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3536;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2752;columns=4; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:32.040567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:32.040694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:32.040744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:32.040782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:32.040845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:32.040903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:32.040986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:32.041068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:32.041913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:32.042236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:32.155815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:32.155888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:32.176038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:32.176199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:32.176393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:32.185839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:32.186097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:32.186911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:32.187161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:32.189798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.190031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:32.191311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:32.191386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.191481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:32.191531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:32.191582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:32.191855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:32.366421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.367981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.368060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.377680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.377798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.377875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.377949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.378047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.378138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.378210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1120: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2026-01-10T12:07:49.708138Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2026-01-10T12:07:49.708180Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2026-01-10T12:07:49.708252Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-10T12:07:49.708303Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-10T12:07:49.708340Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-10T12:07:49.708370Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-10T12:07:49.708404Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2026-01-10T12:07:49.709165Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.709365Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.709457Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-10T12:07:49.709546Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], version: 5 2026-01-10T12:07:49.709635Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 4 2026-01-10T12:07:49.711546Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.711659Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.711716Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-10T12:07:49.711752Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 5 
2026-01-10T12:07:49.711797Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 6 2026-01-10T12:07:49.711891Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2026-01-10T12:07:49.717032Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.719566Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-10T12:07:49.733000Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2627 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-10T12:07:49.733073Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-10T12:07:49.733307Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2627 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-10T12:07:49.733504Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 2627 } } CommitVersion { Step: 400 TxId: 281474976715657 } debug: NTableState::TProposedWaitParts operationId# 281474976715657:1 2026-01-10T12:07:49.734962Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 1256 RawX2: 85899349064 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-10T12:07:49.735046Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-10T12:07:49.735277Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 1256 RawX2: 85899349064 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-10T12:07:49.735410Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts 
operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2026-01-10T12:07:49.735608Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 1256 RawX2: 85899349064 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-10T12:07:49.735739Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2026-01-10T12:07:49.735837Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-10T12:07:49.735913Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2026-01-10T12:07:49.735988Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:1 129 -> 240 2026-01-10T12:07:49.743098Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-10T12:07:49.750997Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-10T12:07:49.751600Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-10T12:07:49.751672Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2026-01-10T12:07:49.751909Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-10T12:07:49.751986Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-10T12:07:49.752095Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-10T12:07:49.752241Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-10T12:07:49.752331Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2026-01-10T12:07:49.752412Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-10T12:07:49.752488Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-10T12:07:49.752552Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:0 2026-01-10T12:07:49.753909Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 3 2026-01-10T12:07:49.753973Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:1 2026-01-10T12:07:49.754000Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:1 2026-01-10T12:07:49.754094Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 5 2026-01-10T12:07:49.754181Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:2 2026-01-10T12:07:49.754221Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:2 2026-01-10T12:07:49.754256Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 >> TestYmqHttpProxy::TestTagQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2026-01-10T12:06:26.549497Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1768046786549463 2026-01-10T12:06:27.286737Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703131919144271:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:27.286797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:27.457011Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:27.547726Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:28.145067Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.145218Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:28.236177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.341488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:28.341631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:28.359482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:28.359556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:06:28.487616Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:06:28.491470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.492794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.494139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.538937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:28.539011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.560497Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.601539Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.603472Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:28.920807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003051/r3tmp/yandexO3PNxd.tmp 2026-01-10T12:06:28.920833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003051/r3tmp/yandexO3PNxd.tmp 2026-01-10T12:06:28.921003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003051/r3tmp/yandexO3PNxd.tmp 2026-01-10T12:06:28.930218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:29.046585Z INFO: TTestServer started on Port 24535 GrpcPort 61931 PQClient connected to localhost:61931 2026-01-10T12:06:29.340550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:32.287872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703131919144271:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.287952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:33.583510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703157688949396:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.583677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703157688949409:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.583738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.586674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703157688949413:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.586775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:33.588531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:33.706400Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703157688949412:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:06:34.436452Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703157688949500:3009] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:34.531765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:06:34.554126Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703155706714992:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:34.559567Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703161983916814:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:34.561200Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmZiMjM4Y2ItNWE2MTEwOTctNzkwZmViMzAtMzQ5NGFmZTc=, ActorId: [1:7593703157688949393:2332], ActorState: ExecuteState, LegacyTraceId: 01kekwt4t7457rbffbsx3wmvt5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:06:34.569131Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MThlYjI1NGUtNDdmYTg5NTItZGQ1YmJhMmMtZjY4ZDNjNDk=, ActorId: [2:7593703155706714968:2303], ActorState: ExecuteState, LegacyTraceId: 01kekwt4xr3z7dd8c08n46wddf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:06:34.622154Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:06:34.622300Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At functi ... : (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:08:04.693784Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1768046884] updating configuration. Deleted partitions []. 
Added partitions [] 2026-01-10T12:08:04.694163Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:08:04.696211Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:08:04.701964Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1768046884744, TxId 281474976710678 2026-01-10T12:08:04.702005Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.702028Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-10T12:08:04.702077Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:08:04.702088Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.702105Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:08:04.702116Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.702366Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1768046884744, TxId 281474976710678 2026-01-10T12:08:04.702393Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.702407Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:08:04.702422Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.702453Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:08:04.702634Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976710678 2026-01-10T12:08:04.702662Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:08:04.702677Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:08:04.702695Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.702935Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:08:04.705332Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:08:04.705599Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:08:04.705696Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:08:04.705725Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.705743Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.705759Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.705777Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.705792Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.705819Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:08:04.706170Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1768046884" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1768046884" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1768046884" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-10T12:08:04.706214Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:08:04.706332Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:08:04.708415Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:08:04.716638Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-10T12:08:04.727780Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.727825Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.727845Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.727867Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.727884Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.733945Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.733983Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.734000Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.734020Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.734036Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.828163Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.828200Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.828215Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.828230Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.828244Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.834294Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.834330Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.834347Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.834364Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.834378Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.928438Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.928477Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.928492Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.928509Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.928525Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:08:04.934600Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:04.934637Z node 10 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.934653Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:04.934670Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:04.934684Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:08:05.295184Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [9:7593703551732165086:2486] TxId: 281474976710679. Ctx: { TraceId: 01kekwwy1v4ftwjbz4f0s7dj49, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTlkZGNiODgtMjBjNjA0YTAtZDJjZTJlZTMtYmM5ZTljNTk=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-01-10T12:08:05.295832Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [9:7593703551732165093:2486], TxId: 281474976710679, task: 3. Ctx: { CheckpointId : . TraceId : 01kekwwy1v4ftwjbz4f0s7dj49. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=YTlkZGNiODgtMjBjNjA0YTAtZDJjZTJlZTMtYmM5ZTljNTk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [9:7593703551732165086:2486], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> MonPage::OptionsNoContent [GOOD] Test command err: 2026-01-10T12:07:26.449435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703381922129377:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:26.449624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:26.471291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:27.349092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:27.360023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:27.373079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:27.456207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:27.456318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:27.825231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:27.867872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703381922129169:2081] 1768046846352219 != 1768046846352222 2026-01-10T12:07:27.892700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:27.893015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:27.893024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:27.893033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:27.893127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:28.362881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:28.408260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:28.569129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:28.583810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:28.607840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:07:38.934996Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703435140209384:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:38.935162Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:38.990801Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:39.180417Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703435140209172:2081] 1768046858905994 != 1768046858905997 2026-01-10T12:07:39.191672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:39.191797Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:39.192290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:39.194308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:39.197628Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:39.481209Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:39.481248Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:39.481259Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:39.481411Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:39.723263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:39.935149Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.016606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:40.082719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:40.096057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 25509, MsgBus: 14700 2026-01-10T12:06:09.550169Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703052196002108:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:09.550206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:09.913687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:09.952765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:09.952863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:10.099453Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703052196002084:2081] 1768046769543468 != 1768046769543471 2026-01-10T12:06:10.130540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:10.133931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:10.169758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:10.293071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:10.293102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:10.293109Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:10.293183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:10.608239Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:10.964366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:13.543780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703069375872156:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.543898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.544798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703069375872166:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.544869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:13.964507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:06:14.552416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703052196002108:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:14.552524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:15.056662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:15.057034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:15.057296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:15.057418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:15.057523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:15.057626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:15.057725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:15.057829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:15.057942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:15.058070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:15.058385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:06:15.058589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:06:15.058696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[1:7593703073670840030:2337];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:06:15.073834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:06:15.073917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:06:15.074177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:06:15.074284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:06:15.074450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:06:15.074574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:06:15.074701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:06:15.074840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:06:15.074952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:06:15.075078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:06:15.075202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:06:15.075339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593703073670840040:2347];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:06:15.075457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186 ... ot/.metadata/script_executions 2026-01-10T12:07:27.448832Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:27.448858Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:27.448867Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:27.448954Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:27.717904Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:27.981401Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:27.993188Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:31.700239Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593703382971861027:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:31.700344Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12898, MsgBus: 21645 2026-01-10T12:07:33.222870Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:33.223091Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:33.347880Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-10T12:07:33.348025Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:33.350438Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:33.401551Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:33.416541Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:33.471137Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:33.471176Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:33.471186Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:33.471275Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:34.187092Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:34.384789Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:39.073594Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593703441368654761:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:39.073721Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:39.074497Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593703441368654771:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:39.074577Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:39.197499Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.323408Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.403039Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.500554Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.758845Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.837254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:39.936292Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:40.014994Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:40.091893Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:40.194917Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:40.418676Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593703445663622806:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:40.418785Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:40.419274Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593703445663622811:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:40.419337Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593703445663622812:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:40.419388Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:40.429740Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:40.468298Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593703445663622815:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2026-01-10T12:07:40.543732Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593703445663622866:3028] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 51], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:41.960880Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:07:41.960943Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageYellow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:27.414357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:27.414704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:27.543831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:27.544893Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:27.552853Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:27.553312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:27.553551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:27.555246Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:27.555593Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:27.555764Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:28.037345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:28.213690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:28.213858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:28.214744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:28.214849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:28.274147Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:05:28.275277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:28.275910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:28.424377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:28.439806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:29.710089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:29.710158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:29.710193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:29.710612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-10T12:05:41.417241Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:41.417587Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:41.435931Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:41.437394Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:41.440058Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:580:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:41.440457Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:41.440674Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:41.442223Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:573:2169], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:41.442629Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:41.442807Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:41.893416Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:42.025334Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:42.025479Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:42.026000Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:42.026053Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:42.087455Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:42.088502Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:42.088983Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:42.150731Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:42.186257Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:48.317953Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:48.322474Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:05:48.324365Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:05:48.328393Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:48.328469Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:48.328509Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:48.329466Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:05:48.339734Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-10T12:05:48.340146Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:05:48.384390Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:48.384581Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:48.446724Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:05:48.447584Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:48.777969Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2026-01-10T12:05:48.778747Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-3" reason: "YELLOW-7932-1231c6b1-4" reason: "YELLOW-7932-1231c6b1-5" type: "COMPUTE" level: 2 } issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node ... 
1-10T12:07:38.236695Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:38.236773Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:38.237075Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:38.237170Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:38.237437Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:38.237538Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:38.237808Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:38.237884Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:38.238166Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:38.238245Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:38.350270Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.375237Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-10T12:07:38.375848Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-10T12:07:38.376037Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2026-01-10T12:07:38.376610Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2026-01-10T12:07:38.376762Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 19 Cookie 19 2026-01-10T12:07:38.376905Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 20 Cookie 20 2026-01-10T12:07:38.377033Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 21 Cookie 21 2026-01-10T12:07:38.377153Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 22 Cookie 22 2026-01-10T12:07:38.377636Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.378835Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.379098Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.379295Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-10T12:07:38.379475Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.379676Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.379857Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.380037Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.380907Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:38.571568Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.589390Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.706626Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.728770Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.745218Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.775270Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.811666Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:38.945673Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:40.712159Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:40.712272Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:40.712346Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:40.712745Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:40.828590Z node 15 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.828841Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.828936Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.829127Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.829254Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.829388Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.829501Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.829638Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:40.830061Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-17" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 17 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-18" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 18 host: "::1" port: 12005 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-19" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 19 host: "::1" port: 12006 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-20" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 20 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-21" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 21 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-22" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 22 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: 
"YELLOW-7932-1231c6b1-17" reason: "YELLOW-7932-1231c6b1-18" reason: "YELLOW-7932-1231c6b1-19" reason: "YELLOW-7932-1231c6b1-20" reason: "YELLOW-7932-1231c6b1-21" reason: "YELLOW-7932-1231c6b1-22" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1da1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-14" type: "STATE_STORAGE_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-b30b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-59f1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-14" type: "SCHEME_BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-106b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-7532" status: YELLOW message: "Multiple rings have unavailable replicas" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-14" type: "BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-0632-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "BOARD_NODE" level: 3 listed: 3 count: 3 } location { id: 14 host: "::1" port: 12001 } >> TestKinesisHttpProxy::TestUnauthorizedPutRecords |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |85.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.0%| [TM] {RESULT} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |85.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |85.1%| [TA] $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.1%| [TA] {RESULT} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |85.1%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |85.1%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> TestYmqHttpProxy::TestListQueues >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |85.1%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large 
|85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.1%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> HttpRequest::ProbeBaseStatsServerless |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.2%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> ColumnStatistics::EqWidthHistogram >> BasicStatistics::ServerlessGlobalIndex >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence |85.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:49.523605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:49.523735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:49.523777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:49.523838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:49.523878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:49.523910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:49.523994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:49.524070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:49.524966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:49.525272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:49.626532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:49.626596Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:49.644283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:49.644393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:49.644622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:49.654115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:49.654359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:49.655139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-10T12:06:49.655442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:49.664970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:49.665199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:49.666633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:49.666704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:49.666777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:49.666833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:49.666893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:49.667184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:49.984805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.985949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.986992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.987090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:49.987177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:07:43.449763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:07:47.875837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:07:47.875988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2026-01-10T12:07:47.876105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:07:47.976070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-10T12:07:47.976234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:07:47.976356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:07:48.120418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-10T12:07:48.120562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-10T12:07:48.120660Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-10T12:07:48.182622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-10T12:07:48.182768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-10T12:07:48.182852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-10T12:07:48.227751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-10T12:07:48.274199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:07:48.274396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:07:48.274483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:07:48.274617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:07:48.285473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:07:52.506907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-10T12:07:52.551323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:07:52.551601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:07:52.551681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:07:52.551813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:07:52.562612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:07:56.749912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 
'Ready' dataSize 41 rowCount 1 cpuUsage 0.0023 2026-01-10T12:07:56.791219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:07:56.791463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:07:56.791559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:07:56.791707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:07:56.802247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:08:00.919021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2026-01-10T12:08:00.960425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:08:00.960685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:08:00.960767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:08:00.960912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:08:00.971450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:08:05.002387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2026-01-10T12:08:05.043782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:08:05.043970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:08:05.044022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:08:05.044147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:08:05.054645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:08:09.157489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2026-01-10T12:08:09.198885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-10T12:08:09.199131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-10T12:08:09.199213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-10T12:08:09.199405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:08:09.209840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-10T12:08:13.058716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:13.059167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-39-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2026-01-10T12:08:13.067148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... 
waiting for metering (done) |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest::LdapServerIsUnavailable >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |85.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |85.2%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless |85.2%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicStatistics::TwoTables >> BasicStatistics::SimpleGlobalIndex >> BasicStatistics::DedicatedTimeIntervals >> HttpRequest::Probe >> TestKinesisHttpProxy::TestListStreamConsumersWithToken |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] |85.2%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestYmqHttpProxy::TestDeleteMessage >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TestKinesisHttpProxy::TestWrongStream >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence >> TestYmqHttpProxy::TestUntagQueue >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] >> KqpWorkloadServiceDistributed::TestDistributedQueue >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestYmqHttpProxy::TestListQueues [GOOD] >> DefaultPoolSettings::TestResourcePoolsSysViewFilters >> 
LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> BasicStatistics::TwoNodes [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> TestKinesisHttpProxy::TestCounters |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:34.928974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:35.099487Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:533:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:35.099964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:35.100056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:35.764687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:35.894854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:35.895051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:35.938132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:36.091133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:36.956169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:36.956926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:36.956984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:36.957020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:36.958086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:37.057158Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:37.774715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:07:46.044014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:46.044377Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:46.055896Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:07:46.055967Z node 3 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-10T12:07:46.061508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:46.061804Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:46.156483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:46.156616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:46.157140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:46.157231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:46.202292Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:07:46.202520Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:07:46.213475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:46.213942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:46.500998Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:46.501143Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:46.502059Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.503022Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.503891Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.517714Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.518166Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.518417Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.518635Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.518934Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.519419Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:07:46.546936Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:07:46.548191Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:46.808584Z node 2 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:46.874478Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:07:46.874581Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:07:46.918528Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:07:46.920008Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:07:46.920245Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:07:46.920313Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:07:46.920368Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:07:46.920429Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:07:46.920479Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:07:46.920531Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:07:46.921364Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:07:46.946130Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:07:46.946239Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2544:2555], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:07:47.002760Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2580:2584] 2026-01-10T12:07:47.003590Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2580:2584], schemeshard id = 72075186224037897 2026-01-10T12:07:47.081602Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2650:2618] 2026-01-10T12:07:47.086681Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:07:47.108032Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. Describe result: PathErrorUnknown 2026-01-10T12:07:47.108204Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. Creating table 2026-01-10T12:07:47.108297Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:07:47.125150Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2744:2662], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:07:47.134050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.147868Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:07:47.148051Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. Subscribe on create table tx: 281474976720657 2026-01-10T12:07:47.163098Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2673:2630] Owner: [2:2672:2629]. Subscribe on tx: 281474976720657 registered 2026-01-10T12:07:47.370741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: ... ] RequestId[ 23 ], ReplyToActorId[ [2:4346:3264]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:09.276934Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:08:09.276973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4346:3264], StatRequests.size() = 1 2026-01-10T12:08:10.249427Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4377:3274]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:10.249645Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:08:10.249687Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4377:3274], StatRequests.size() = 1 2026-01-10T12:08:10.737526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:08:10.737758Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:08:10.737919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:08:10.738138Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:08:11.266039Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4408:3284]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:11.266285Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:08:11.266315Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4408:3284], StatRequests.size() = 1 2026-01-10T12:08:12.358495Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ 
[2:4443:3296]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:12.358872Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:08:12.358930Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4443:3296], StatRequests.size() = 1 2026-01-10T12:08:13.589602Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4478:3306]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:13.589921Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:08:13.589966Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4478:3306], StatRequests.size() = 1 2026-01-10T12:08:14.314883Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:08:14.480035Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:08:14.480186Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:08:14.480241Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:08:14.480297Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:08:15.080974Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4521:3319]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:15.081285Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:08:15.081332Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4521:3319], StatRequests.size() = 1 2026-01-10T12:08:16.298494Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4556:3329]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:16.298866Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:08:16.298918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4556:3329], StatRequests.size() = 1 2026-01-10T12:08:17.039443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:08:17.039828Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-10T12:08:17.040109Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:08:17.040168Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:08:17.101168Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:08:17.101253Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:08:17.101570Z node 2 :STATISTICS DEBUG: 
tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:08:17.122064Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:08:17.801306Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4589:3341]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:17.801632Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:08:17.801679Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4589:3341], StatRequests.size() = 1 2026-01-10T12:08:19.034256Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4622:3353]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:19.034643Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:08:19.034694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4622:3353], StatRequests.size() = 1 2026-01-10T12:08:20.197127Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4655:3363]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:20.197487Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:08:20.197540Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4655:3363], StatRequests.size() = 1 2026-01-10T12:08:20.816853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:08:21.502277Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4692:3375]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:21.502671Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:08:21.502728Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4692:3375], StatRequests.size() = 1 2026-01-10T12:08:22.662232Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4727:3385]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:22.662576Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-10T12:08:22.662633Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4727:3385], StatRequests.size() = 1 2026-01-10T12:08:23.290349Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:08:23.290625Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-10T12:08:23.290769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:08:23.291044Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:08:23.364962Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, 
paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:08:23.365052Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:08:23.365305Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:08:23.391196Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:08:24.054227Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4762:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:24.054590Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-10T12:08:24.054646Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4762:3397], StatRequests.size() = 1 2026-01-10T12:08:24.055162Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:4764:2653]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:24.059278Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:08:24.059406Z node 3 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [3:4774:2657] 2026-01-10T12:08:24.059484Z node 3 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [3:4774:2657] 2026-01-10T12:08:24.079361Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4782:3399] 2026-01-10T12:08:24.080005Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:4774:2657], server id = [2:4782:3399], tablet id = 72075186224037894, status = OK 2026-01-10T12:08:24.092721Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4782:3399], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:08:24.092842Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2026-01-10T12:08:24.093205Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 18446744073709551615 2026-01-10T12:08:24.093343Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [3:4764:2653], StatRequests.size() = 1 2026-01-10T12:08:24.093746Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> TestKinesisHttpProxy::ListShardsTimestamp >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad 
[GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> KqpPg::AlterColumnSetDefaultFromSequence [FAIL] >> KqpPg::CreateTableIfNotExists_GenericQuery >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] >> TestKinesisHttpProxy::TestWrongStream2 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod >> KqpWorkloadServiceActors::TestPoolFetcher >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag |85.3%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageRed >> TExportToS3Tests::CheckItemProgress [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> TExportToS3Tests::CancelledExportEndTime >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> KqpPg::ExplainColumnsReorder [GOOD] >> TSchemeShardServerLess::StorageBilling [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> THeavyPerfTest::TTestLoadEverything [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> 
ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> TestYmqHttpProxy::TestSendMessageBatch |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> ResourcePoolsDdl::TestPoolSwitchToLimitedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:56.731489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:56.731604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:56.731673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:56.731730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:56.731776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:56.731811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:56.731903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:56.731995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:56.733026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:56.733385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:56.835808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:56.835890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:56.866597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:56.866797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:56.867083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:56.892439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:56.892734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:56.893475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:56.893787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:56.901419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:56.901699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:56.903281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:56.903354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:56.903435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:56.903486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:56.903532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:56.903900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:57.215922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.229930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:06:57.230117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:57.230608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eBoard Send, to populator: [1:1116:3033], at schemeshard: 72075186233409549, txId: 107, path id: 32 2026-01-10T12:08:10.132964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-10T12:08:10.133011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2026-01-10T12:08:10.133074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-10T12:08:10.133102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2026-01-10T12:08:10.133138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-10T12:08:10.133893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-10T12:08:10.133992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-10T12:08:10.134032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-10T12:08:10.134068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 12 2026-01-10T12:08:10.134104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 6 2026-01-10T12:08:10.135362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-10T12:08:10.135471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-10T12:08:10.135498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-10T12:08:10.135519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], version: 18446744073709551615 2026-01-10T12:08:10.135541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-10T12:08:10.135592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is 
published: true 2026-01-10T12:08:10.137131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-10T12:08:10.137194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2026-01-10T12:08:10.137512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-10T12:08:10.137689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:08:10.137720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:08:10.137750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:08:10.137776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:08:10.137807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-10T12:08:10.137863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1307:3187] message: TxId: 107 2026-01-10T12:08:10.137906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:08:10.137940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:08:10.137973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-10T12:08:10.138072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 2 2026-01-10T12:08:10.138901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-10T12:08:10.139918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-10T12:08:10.140743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:08:10.140786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:3508:5340] TestWaitNotification: OK eventTxId 107 2026-01-10T12:08:10.157049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 1281 RawX2: 4294970465 } TabletId: 72075186233409552 State: 4 2026-01-10T12:08:10.157134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2026-01-10T12:08:10.158484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: 
SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2026-01-10T12:08:10.158600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2026-01-10T12:08:10.159021Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2026-01-10T12:08:10.161578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2026-01-10T12:08:10.161851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 1 2026-01-10T12:08:10.162408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2026-01-10T12:08:10.162455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 32], at schemeshard: 72075186233409549 2026-01-10T12:08:10.162525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2026-01-10T12:08:10.164574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72075186233409549:4 2026-01-10T12:08:10.164625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2026-01-10T12:08:10.164972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2026-01-10T12:08:10.278660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-10T12:08:10.278753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:08:10.278809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:08:10.278870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-10T12:08:10.278911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-10T12:08:10.278942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-10T12:08:10.278974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-10T12:08:10.279003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 
72075186233409549 2026-01-10T12:08:10.279023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-10T12:08:10.335313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:10.335419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:06:00.000000Z 2026-01-10T12:08:10.335519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:33.946827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:33.946962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:07:00.000000Z 2026-01-10T12:08:33.947037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete >> TestKinesisHttpProxy::TestCounters [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.3%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Checksums ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 28166, MsgBus: 13727 2026-01-10T12:07:00.555492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703272964737848:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:00.555683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:00.898518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:01.016217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:01.016353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:01.023261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703272964737816:2081] 1768046820553507 != 1768046820553510 2026-01-10T12:07:01.031061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.034378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-10T12:07:01.129102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:01.152766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:01.152790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:01.152815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:01.152922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:01.571404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:01.782489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:01.800813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:05.018227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703294439575193:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:05.018344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703294439575204:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:05.018399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:05.023150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:05.023469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703294439575208:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:05.023544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:05.038795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703294439575207:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:07:05.183170Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703294439575260:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:05.556474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703272964737848:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:05.556566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17522, MsgBus: 18456 2026-01-10T12:07:06.320202Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703298654585576:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:06.320268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:06.363984Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:06.556280Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:06.556361Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:06.603242Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:06.623558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:06.636164Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703298654585536:2081] 1768046826313384 != 1768046826313387 2026-01-10T12:07:06.640213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:06.878086Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:06.878106Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:06.878113Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:06.878191Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:07.352228Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:07.475901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:07.482927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:10.721001Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703315834455608:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:10.721127Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:10.721817Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703315834455621:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:10.721862Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703315834455620:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:10.721892Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:10.725549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:10.742722Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703315834455624:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:07:10.826163Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703315834455675:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13393, MsgBus: 22784 2026-01-10T12:07:11.899655Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703317807477835:2246];send_to ... meshard: 72057594046644480 2026-01-10T12:08:18.982476Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593703586781677003:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:18.982621Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:20.044851Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593703616846448937:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.045018Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.048594Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593703616846448947:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.048734Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.075345Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:20.169211Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:20.288733Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593703616846449115:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.288950Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.289713Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593703616846449120:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.289777Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593703616846449121:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.290678Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:20.301750Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:20.329250Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593703616846449124:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-10T12:08:20.429168Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593703616846449175:2649] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-10T12:08:31.642205Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:31.652493Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:31.659105Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:31.659690Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:31.660106Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:32.061259Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:32.061498Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:32.103603Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:34:2081] 1768046904405762 != 1768046904405766 2026-01-10T12:08:32.117763Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:32.173920Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:32.274957Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:32.875421Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:32.915107Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:33.081266Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:33.081489Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:875:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:33.081678Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:33.083756Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:880:2757], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:33.084026Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:33.091958Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:33.480407Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:878:2755], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:08:33.516233Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:935:2793] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "db0fd744-41beba3b-9225bec1-4467f9f5" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'219) \'(\'\"_id\" \'\"247a0a6a-994d5755-b8a3b2f7-66545e4d\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |85.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TestKinesisHttpProxy::TestWrongRequest >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> 
TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/build_index/ut/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild 2026-01-10 12:08:30,718 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:08:30,910 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 18016 58.9M 59.1M 33.0M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/002e10/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/test_tool.args 18213 1.7G 1.7G 1.6G └─ ydb-core-tx-datashard-build_index-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/002e10/ydb/core/tx/datashard/build_index/ut/test-results/unittest/ytest.re Test command err: 2026-01-10T11:58:46.806356Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593701150992195334:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:46.806568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T11:58:48.489491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T11:58:48.727095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T11:58:49.074007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T11:58:49.080418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:49.452407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T11:58:49.683574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T11:58:49.692381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T11:58:49.812800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T11:58:50.562190Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.133948s 2026-01-10T11:58:50.562590Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.134393s 2026-01-10T11:58:50.864060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T11:58:51.285014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T11:58:51.816810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593701150992195334:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T11:58:51.827528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T11:58:51.859050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T11:58:52.640863Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7593701176761999817:2286] 2026-01-10T11:58:52.645512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T11:58:53.223566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T11:58:53.223630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T11:58:53.302394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T11:58:53.303491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T11:58:53.316721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T11:58:53.343426Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T11:58:53.343852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T11:58:53.352453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7593701181056967129:2286] in generation 1 2026-01-10T11:58:53.761661Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T11:58:54.405414Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T11:58:54.421780Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T11:58:54.422539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7593701185351934432:2289] 2026-01-10T11:58:54.422569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:58:54.422896Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T11:58:54.423211Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:58:54.426059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593701176761999812:2517], serverId# [1:7593701176761999818:2520], sessionId# [0:0:0] 2026-01-10T11:58:54.426082Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T11:58:54.426772Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T11:58:54.428055Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:58:54.448154Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:58:54.448213Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T11:58:54.448273Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:58:54.448392Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T11:58:54.451631Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-10T11:58:54.452604Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-10T11:58:54.485117Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:58:54.489005Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T11:58:54.489428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T11:58:54.500441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593701185351934447:2541], serverId# [1:7593701185351934448:2542], sessionId# [0:0:0] 2026-01-10T11:58:54.574988Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1768046334544 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768046334544 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T11:58:54.575034Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2026-01-10T11:58:54.585131Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T11:58:54.585902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:58:54.585920Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T11:58:54.586410Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768046334544:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T11:58:54.598513Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1768046334544:281474976710657 keys extracted: 0 2026-01-10T11:58:54.599341Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T11:58:54.606280Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T11:58:54.606322Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T11:58:54.652152Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T11:58:54.665640Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T11:58:54.691584Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1768046334544} 2026-01-10T11:58:54.691995Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:58:54.701437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1768046334551 2026-01-10T11:58:54.701465Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T11:58:54.702184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1768046334551 2026-01-10T11:58:54.702595Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T11:58:54.702612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T11:58:54.702636Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T11:58:54.703408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768046334544 : ... 
#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:26.843057Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:26.857700Z node 44 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:26.922262Z node 44 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:26.933853Z node 44 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [44:7593703640586240551:2283] 2026-01-10T12:08:26.934175Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:08:26.964067Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:08:26.964175Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:08:26.967298Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:08:26.967381Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:08:26.967449Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:08:26.968037Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:08:26.968521Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:08:26.968571Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [44:7593703640586240566:2283] in generation 1 2026-01-10T12:08:26.969501Z node 44 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:08:26.969546Z node 44 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:08:26.969653Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:08:26.969713Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [44:7593703640586240568:2284] 2026-01-10T12:08:26.969735Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:08:26.969755Z node 44 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:08:26.969775Z node 44 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:08:26.969917Z node 44 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:08:26.970016Z node 44 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:08:26.970048Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:08:26.970074Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:08:26.970096Z node 44 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:08:26.970128Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:08:26.971638Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [44:7593703640586240549:2493], serverId# [44:7593703640586240553:2494], sessionId# [0:0:0] 2026-01-10T12:08:26.971794Z node 44 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:08:26.972203Z node 44 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:08:26.972290Z node 44 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:08:26.975331Z node 44 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:08:26.975748Z node 44 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:08:26.975819Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:08:26.980169Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [44:7593703640586240582:2510], serverId# [44:7593703640586240584:2512], sessionId# [0:0:0] 2026-01-10T12:08:26.984322Z node 44 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1768046907025 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768046907025 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:08:26.984360Z node 44 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:08:26.984517Z node 44 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:08:26.984628Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:08:26.984660Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:08:26.984704Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768046907025:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:08:26.985127Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1768046907025:281474976715657 keys 
extracted: 0 2026-01-10T12:08:26.985302Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:08:26.985586Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:08:26.985651Z node 44 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:08:26.986685Z node 44 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:08:26.987337Z node 44 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:08:26.991140Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1768046907024 2026-01-10T12:08:26.991177Z node 44 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:08:26.991692Z node 44 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1768046907025} 2026-01-10T12:08:26.991776Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:08:26.991853Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1768046907032 2026-01-10T12:08:26.991906Z node 44 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:08:26.991940Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:08:26.991971Z node 44 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:08:26.992042Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768046907025 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [44:7593703640586240134:2151], exec latency: 1 ms, propose latency: 6 ms 2026-01-10T12:08:26.992105Z node 44 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:08:26.992156Z node 44 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:08:26.996656Z node 44 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:08:26.996725Z node 44 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:08:27.082350Z node 44 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:27.284936Z node 44 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Traceback 
(most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/002e10/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/002e10/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |85.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> TestKinesisHttpProxy::ListShardsToken |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/build_index/ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] |85.3%| [TM] {RESULT} ydb/core/tx/datashard/build_index/ut/unittest |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> TestKinesisHttpProxy::GoodRequestCreateStream |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> TExportToS3Tests::Checksums [GOOD] >> KqpWorkloadService::TestQueueSizeSimple >> TestYmqHttpProxy::TestListQueueTags >> TExportToS3Tests::ChecksumsWithCompression >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> DefaultPoolSettings::TestResourcePoolsSysViewFilters [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> 
KqpWorkloadServiceDistributed::TestNodeDisconnect >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |85.3%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD |85.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-10T12:07:42.976306Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703453464939564:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:42.976879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:43.407534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:43.470841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:43.470974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:43.508053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:43.843554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:43.849661Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703453464939398:2081] 1768046862874669 != 1768046862874672 2026-01-10T12:07:43.860545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:43.860590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:43.860600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:43.860612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:43.860695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:44.014789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:44.188560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:44.624880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:44.633638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:44.641527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:44.684949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:07:44.785288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.992701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.080662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:45.086681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.178934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.257126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.315517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.358010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.397235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.443688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.493842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.827306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703474939777532:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.827466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.828053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703474939777545:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.828135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703474939777544:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.828302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.839133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:47.873308Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703474939777548:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:47.939959Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703474939777599:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:47.948049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703453464939564:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:47.948133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:48.442850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.536912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.573233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... 
Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:40.464919Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:08:40.465009Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 26ms 2026-01-10T12:08:40.465335Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:40.466814Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:40.466837Z node 7 :SQS DEBUG: executor.cpp:287: Request [] 
Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 27ms 2026-01-10T12:08:40.467141Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:40.467164Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:08:40.467255Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 27ms 2026-01-10T12:08:40.467611Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } 
Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:40.664176Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7593703699819418977:2446]: Pool not found 2026-01-10T12:08:40.664384Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:08:41.123968Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7593703699819419002:2450]: Pool not found 2026-01-10T12:08:41.128228Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:08:41.140548Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593703704114386384:2466], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:41.140724Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7593703704114386385:2467], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:08:41.140811Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:41.144771Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593703704114386388:2468], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:41.144921Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:41.432340Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38594) incoming connection opened 2026-01-10T12:08:41.432436Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38594) -> (POST /Root, 3 bytes) 2026-01-10T12:08:41.432586Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d808:3e24:aa7b:0:c008:3e24:aa7b:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 19f15983-25125917-7201c05-3d26ee1c 2026-01-10T12:08:41.432872Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UnknownMethodName] requestId [19f15983-25125917-7201c05-3d26ee1c] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2026-01-10T12:08:41.433049Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38594) <- (400 InvalidAction, 76 bytes) 2026-01-10T12:08:41.433105Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:38594) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2026-01-10T12:08:41.433138Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:38594) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 19f15983-25125917-7201c05-3d26ee1c Content-Type: application/x-amz-json-1.1 Content-Length: 76 2026-01-10T12:08:41.433251Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38594) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2026-01-10T12:08:41.543293Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7593703704114386382:2465]: Pool not found 2026-01-10T12:08:41.543975Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2026-01-10T12:03:41.932964Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593702417103242656:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:03:41.933172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:03:42.318402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:42.544154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:03:42.590010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:03:42.590145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:03:42.610849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:03:42.787254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:03:42.898818Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:03:42.921789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:03:43.160911Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593702417103242674:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:03:43.160997Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593702425693177785:2458] HANDLE EvNavigateScheme dc-1 2026-01-10T12:03:43.161107Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593702421398209979:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:03:43.161206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593702421398210136:2213][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593702421398209979:2120], cookie# 1 2026-01-10T12:03:43.162834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702421398210150:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210146:2213], cookie# 1 2026-01-10T12:03:43.162874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702421398210153:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210147:2213], cookie# 1 2026-01-10T12:03:43.162902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593702421398210155:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210148:2213], cookie# 1 2026-01-10T12:03:43.162993Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702417103242366:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210150:2213], cookie# 1 2026-01-10T12:03:43.163026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593702417103242369:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210153:2213], cookie# 1 2026-01-10T12:03:43.163044Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:7593702417103242372:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593702421398210155:2213], cookie# 1 2026-01-10T12:03:43.163092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702421398210150:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702417103242366:2050], cookie# 1 2026-01-10T12:03:43.163135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702421398210153:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702417103242369:2053], cookie# 1 2026-01-10T12:03:43.163178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593702421398210155:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702417103242372:2056], cookie# 1 2026-01-10T12:03:43.163227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702421398210136:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702421398210146:2213], cookie# 1 2026-01-10T12:03:43.163251Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593702421398210136:2213][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:03:43.163269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702421398210136:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702421398210147:2213], cookie# 1 2026-01-10T12:03:43.163291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593702421398210136:2213][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:03:43.163321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593702421398210136:2213][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593702421398210148:2213], cookie# 1 2026-01-10T12:03:43.163337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593702421398210136:2213][/dc-1] Sync cookie mismatch: sender# [1:7593702421398210148:2213], cookie# 1, current cookie# 0 2026-01-10T12:03:43.163393Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593702421398209979:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:03:43.174488Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593702421398209979:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593702421398210136:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:03:43.174648Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593702421398209979:2120], cacheItem# { Subscriber: { Subscriber: [1:7593702421398210136:2213] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:03:43.178521Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593702425693177786:2459], recipient# [1:7593702425693177785:2458], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:03:43.178630Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593702425693177785:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:03:43.255949Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593702425693177785:2458] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:03:43.264248Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593702425693177785:2458] Handle TEvDescribeSchemeResult Forward to# [1:7593702425693177784:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:03:43.273062Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593702417103242674:2118] Handle TEvProposeTransaction 2026-01-10T12:03:43.273088Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593702417103242674:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:03:43.273204Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593702417103242674:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593702425693177794:2463] 2026-01-10T12:03:43.686004Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593702425693177794:2463] txid# 2814749767 ... metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:40.179188Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593703703036155976:3347], recipient# [6:7593703703036155975:2728], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.249514Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7593703106035699949:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.249702Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593703106035699949:2107], cacheItem# { Subscriber: { Subscriber: [6:7593703110330667634:2332] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:40.249824Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593703703036155978:3348], recipient# [6:7593703703036155977:2729], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.296294Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7593703103062286275:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.296511Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7593703103062286275:2107], cacheItem# { Subscriber: { Subscriber: [7:7593703107357253915:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:40.296655Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7593703700062741453:2629], recipient# [7:7593703700062741452:2639], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.391541Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7593703103062286275:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:40.391727Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7593703103062286275:2107], cacheItem# { Subscriber: { Subscriber: [7:7593703133127057734:2321] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:40.391844Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7593703700062741456:2630], recipient# [7:7593703700062741455:2640], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:41.128264Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: 
self# [6:7593703106035699949:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:41.128452Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593703106035699949:2107], cacheItem# { Subscriber: { Subscriber: [6:7593703136100471537:2417] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:41.128571Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593703707331123277:3349], recipient# [6:7593703707331123276:2730], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:41.184291Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7593703106035699949:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:41.184478Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593703106035699949:2107], cacheItem# { Subscriber: { Subscriber: [6:7593703110330667634:2332] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:41.184609Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593703707331123279:3350], recipient# [6:7593703707331123278:2731], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2026-01-10T12:08:41.252904Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7593703106035699949:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:08:41.253035Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593703106035699949:2107], cacheItem# { Subscriber: { Subscriber: [6:7593703110330667634:2332] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:08:41.253121Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593703707331123281:3351], recipient# [6:7593703707331123280:2732], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2026-01-10T12:07:42.682739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703451885129050:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:42.682868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:43.400846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:43.400953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:43.404163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:43.496631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:43.724308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:43.811172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:43.840262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:43.996379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:43.996401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:43.996409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:43.996484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:44.559873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:44.577207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:44.822918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:44.843812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:07:44.850057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:44.886129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T12:07:45.008028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.208805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.278355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-01-10T12:07:45.283690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.361078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.421506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.485011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.531478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.588974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.651966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.726501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.684103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703451885129050:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:47.684205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:48.065626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477654934444:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.065730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477654934455:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.065799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.070083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:48.072238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477654934459:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.072302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.089258Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703477654934458:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-10T12:07:48.189157Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703477654934511:3106] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:48.645410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.700209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.763534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/s ... K 2026-01-10T12:08:41.701303Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [8615242e-908aa025-58edbbf0-54532af8] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "8615242e-908aa025-58edbbf0-54532af8" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "8615242e-908aa025-58edbbf0-54532af8" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-10T12:08:41.701659Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [DeleteMessageBatch] requestId [8615242e-908aa025-58edbbf0-54532af8] Got succesfult GRPC response. 2026-01-10T12:08:41.701785Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [8615242e-908aa025-58edbbf0-54532af8] reply ok 2026-01-10T12:08:41.701956Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [DeleteMessageBatch] requestId [8615242e-908aa025-58edbbf0-54532af8] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 197 SourceAddress: 584b:85b6:d47b:0:404b:85b6:d47b:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2026-01-10T12:08:41.702093Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:33602) <- (200 , 44 bytes) 2026-01-10T12:08:41.702228Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:33602) connection closed 2026-01-10T12:08:41.703440Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:33608) incoming connection opened 2026-01-10T12:08:41.703491Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:33608) -> (POST /Root, 106 bytes) 2026-01-10T12:08:41.703756Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18b8:85b6:d47b:0:b8:85b6:d47b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 483409b1-febb271b-b2035dfb-ce774c0d 2026-01-10T12:08:41.704107Z node 7 :HTTP_PROXY INFO: http_req.cpp:543: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] got new request from [18b8:85b6:d47b:0:b8:85b6:d47b:0] 2026-01-10T12:08:41.704489Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:477: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2026-01-10T12:08:41.704509Z node 7 :HTTP_PROXY INFO: http_req.cpp:301: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:08:41.704680Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 483409b1-febb271b-b2035dfb-ce774c0d 2026-01-10T12:08:41.704768Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [483409b1-febb271b-b2035dfb-ce774c0d] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2026-01-10T12:08:41.704778Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [483409b1-febb271b-b2035dfb-ce774c0d] Request proxy started 2026-01-10T12:08:41.704844Z node 7 :SQS DEBUG: service.cpp:761: Request [483409b1-febb271b-b2035dfb-ce774c0d] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2026-01-10T12:08:41.704896Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [483409b1-febb271b-b2035dfb-ce774c0d] Get configuration duration: 0ms Http output full {} 2026-01-10T12:08:41.704982Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [483409b1-febb271b-b2035dfb-ce774c0d] Send get leader node request to sqs service for cloud4/000000000000000101v0 2026-01-10T12:08:41.705022Z node 7 :SQS DEBUG: service.cpp:581: Request [483409b1-febb271b-b2035dfb-ce774c0d] Leader node for queue [cloud4/000000000000000101v0] is 7 2026-01-10T12:08:41.705051Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [483409b1-febb271b-b2035dfb-ce774c0d] Got leader node for queue response. Node id: 7. 
Status: 0 2026-01-10T12:08:41.705150Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [483409b1-febb271b-b2035dfb-ce774c0d] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" 2026-01-10T12:08:41.705217Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [483409b1-febb271b-b2035dfb-ce774c0d] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" 2026-01-10T12:08:41.705258Z node 7 :SQS DEBUG: action.h:133: Request [483409b1-febb271b-b2035dfb-ce774c0d] Request started. Actor: [7:7593703704394525302:3950] 2026-01-10T12:08:41.705290Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7593703704394525302:3950] 2026-01-10T12:08:41.705304Z node 7 :SQS DEBUG: service.cpp:754: Request [483409b1-febb271b-b2035dfb-ce774c0d] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2026-01-10T12:08:41.705331Z node 7 :SQS DEBUG: action.h:627: Request [483409b1-febb271b-b2035dfb-ce774c0d] Get configuration duration: 0ms 2026-01-10T12:08:41.705345Z node 7 :SQS TRACE: action.h:647: Request [483409b1-febb271b-b2035dfb-ce774c0d] Got configuration. Root url: http://ghrun-tjszgfszoq.auto.internal:8771, Shards: 4, Fail: 0 2026-01-10T12:08:41.705377Z node 7 :SQS TRACE: action.h:662: Request [483409b1-febb271b-b2035dfb-ce774c0d] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2026-01-10T12:08:41.705389Z node 7 :SQS TRACE: action.h:427: Request [483409b1-febb271b-b2035dfb-ce774c0d] DoRoutine 2026-01-10T12:08:41.705429Z node 7 :SQS TRACE: queue_leader.cpp:2426: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2026-01-10T12:08:41.705443Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [483409b1-febb271b-b2035dfb-ce774c0d] Received empty result from shard 3 infly. Infly capacity: 0. Messages count: 0 2026-01-10T12:08:41.705454Z node 7 :SQS DEBUG: queue_leader.cpp:1164: Request [483409b1-febb271b-b2035dfb-ce774c0d] No known messages in this shard. Skip attempt to add messages to infly 2026-01-10T12:08:41.705463Z node 7 :SQS DEBUG: queue_leader.cpp:1170: Request [483409b1-febb271b-b2035dfb-ce774c0d] Already tried to add messages to infly 2026-01-10T12:08:41.705494Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/3]. 
ActiveMessageRequests: 0 2026-01-10T12:08:41.705544Z node 7 :SQS TRACE: action.h:264: Request [483409b1-febb271b-b2035dfb-ce774c0d] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } } 2026-01-10T12:08:41.705611Z node 7 :SQS TRACE: proxy_service.h:35: Request [483409b1-febb271b-b2035dfb-ce774c0d] Sending sqs response: { ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-10T12:08:41.705717Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 0ms 2026-01-10T12:08:41.705804Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-10T12:08:41.705847Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7593703704394525301:2547]: ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-10T12:08:41.705886Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7593703704394525302:3950]. Found: 1 2026-01-10T12:08:41.706003Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [483409b1-febb271b-b2035dfb-ce774c0d] HandleResponse: { ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2026-01-10T12:08:41.706065Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [483409b1-febb271b-b2035dfb-ce774c0d] Sending reply from proxy actor: { ReceiveMessage { RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" } RequestId: "483409b1-febb271b-b2035dfb-ce774c0d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-10T12:08:41.706245Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] Got succesfult GRPC response. 2026-01-10T12:08:41.706283Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] reply ok 2026-01-10T12:08:41.706350Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ReceiveMessage] requestId [483409b1-febb271b-b2035dfb-ce774c0d] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 154 SourceAddress: 18b8:85b6:d47b:0:b8:85b6:d47b:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2026-01-10T12:08:41.706426Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:33608) <- (200 , 2 bytes) 2026-01-10T12:08:41.706525Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:33608) connection closed 2026-01-10T12:08:41.712387Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-10T12:08:41.712421Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 14ms 2026-01-10T12:08:41.712572Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-10T12:08:41.712598Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2026-01-10T12:08:41.712664Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 15ms 2026-01-10T12:08:41.712784Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2026-01-10T12:08:41.712904Z node 7 :SQS DEBUG: queue_leader.cpp:1915: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/3] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> TExportToS3Tests::Changefeeds >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> 
TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2026-01-10T12:07:41.412190Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703446017189546:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:41.412262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:41.476822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:42.165065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:42.216228Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703446017189525:2081] 1768046861411553 != 1768046861411556 2026-01-10T12:07:42.226700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:42.226856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:42.232804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:42.242026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:42.436251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:42.487894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:42.508604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:42.508625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:42.508635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:42.508710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:42.885161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:42.892778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:43.153255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:43.162147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:43.170039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:43.372048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.569713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.632363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:43.638343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.702875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.742516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.805870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.860537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.908569Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.945791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.990154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.029331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703467492027673:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.029417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703467492027665:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.029687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.032336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703467492027680:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.032390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.033904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:46.063474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703467492027679:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:46.146894Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703467492027732:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:46.412785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703446017189546:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:46.412893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:46.704341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.740584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.779416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation ... ISIBILITY_ID). Mode: COMPILE_AND_EXEC 2026-01-10T12:08:43.380549Z node 7 :SQS TRACE: executor.cpp:154: Request [aefea4a0-2d785703-fcd104bd-a8541be1] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 3, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 1902612799742245372, "NOW": 1768046923377, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1768046923146, "Offset": 1, "NewVisibilityDeadline": 1768046924377}, {"LockTimestamp": 1768046923226, "Offset": 2, "NewVisibilityDeadline": 1768046925377}]} 2026-01-10T12:08:43.381037Z node 7 :SQS TRACE: executor.cpp:203: Request [aefea4a0-2d785703-fcd104bd-a8541be1] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 
Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001<\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> IncrementalBackup::BackupRestore >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> IncrementalBackup::SimpleBackup >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState [GOOD] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> IncrementalBackup::E2EBackupCollection >> IncrementalBackup::MultiBackup >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2026-01-10T12:08:14.928873Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703589406794970:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:14.928921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.231573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.243066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.243187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.357616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-10T12:08:15.372934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.538721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:15.604990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:15.605017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:15.605035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:15.605137Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:15.924240Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:15.927677Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:15.927706Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:15.929249Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26295, port: 26295 2026-01-10T12:08:15.929350Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:15.962177Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.008564Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:16.056483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:16.057190Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:16.057244Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.103177Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.152419Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.162674Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****4v2g (19A2BB1C) () has now valid token of ldapuser@ldap 2026-01-10T12:08:16.163057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-10T12:08:18.903641Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703605190082736:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:18.903714Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:18.986495Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.142523Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.143851Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703605190082708:2081] 1768046898897898 != 1768046898897901 2026-01-10T12:08:19.158176Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.158318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.168943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.427345Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.427369Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.427387Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.427461Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:19.440202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.856201Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:19.859641Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:19.859667Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:19.860399Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:65373, port: 65373 2026-01-10T12:08:19.860483Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:19.918143Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:19.976763Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:20.022594Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:20.022756Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****rE8g (0D91BC31) () has now 
valid token of ldapuser@ldap 2026-01-10T12:08:23.496427Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703628253695816:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:23.496589Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:23.547468Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.667704Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:23.669503Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703628253695774:2081] 1768046903494510 != 1768046903494513 2026-01-10T12:08:23.685032Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:23.685122Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:23.712154Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:23.879812Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:23.883758Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:23.883783Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:23.883791Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:23.883867Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.096333Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.100342Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:24.100373Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.101180Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:3462 ldap://localhost:3462 ldap://localhost:11111, port: 3462 2026-01-10T12:08:24.101250Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:24.158482Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:24.212742Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:24.256574Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:24.257345Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:24.257401Z node 3 :LDAP_AUTH_PROVIDER DEBUG ... ,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.344621Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.346180Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****nZtQ (ADE28002) () has now valid token of ldapuser@ldap 2026-01-10T12:08:29.021981Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703654629022756:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:29.022026Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:29.176225Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:29.337845Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.337938Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.342028Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.342584Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703654629022732:2081] 1768046909019114 != 1768046909019117 2026-01-10T12:08:29.370847Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.408336Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:29.572815Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:29.572843Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:29.572852Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:29.572941Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:29.754878Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:29.764708Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:29.764752Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:29.765652Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:63051, port: 63051 
2026-01-10T12:08:29.765719Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:29.779892Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:29.829604Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:29.881331Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****gofw (D937FE68) () has now valid token of ldapuser@ldap 2026-01-10T12:08:30.006666Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:35.016187Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593703676082104104:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:35.029297Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:35.084371Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.332306Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593703676082104060:2081] 1768046914952855 != 1768046914952858 2026-01-10T12:08:35.332366Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.568582Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:35.568706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:35.569739Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:35.650659Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:35.776210Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:36.016266Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:36.072856Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:36.072883Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:36.072895Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:36.072987Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:36.427088Z node 5 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:36.431224Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:36.431263Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:36.432107Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:19238, port: 19238 2026-01-10T12:08:36.432221Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:36.461600Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:36.516933Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:19238. Invalid credentials 2026-01-10T12:08:36.517583Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****6Rbw (2F1353E1) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:19238. Invalid credentials)' 2026-01-10T12:08:41.540997Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703707555555752:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:41.541048Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:41.608524Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:41.732497Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:41.732589Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:41.738944Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:41.742851Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:41.743386Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:41.746045Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703707555555728:2081] 1768046921538229 != 1768046921538232 2026-01-10T12:08:41.970760Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:41.996755Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:41.996781Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-10T12:08:41.996788Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:41.996870Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:42.210815Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:42.211302Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:42.211321Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:42.212072Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:9917, port: 9917 2026-01-10T12:08:42.212180Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:42.228693Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:42.272677Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:9917. Invalid credentials 2026-01-10T12:08:42.273182Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****uHQg (2354C2D4) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:9917. Invalid credentials)' |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2026-01-10T12:07:43.648665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703457081582010:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:43.648791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:43.739044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:44.245757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:44.326205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:44.326324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:44.499636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:44.611489Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:44.763822Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703457081581985:2081] 1768046863636887 != 1768046863636890 2026-01-10T12:07:44.769442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:44.788188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:44.836968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:44.836990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:44.837001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:44.837087Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:45.283434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:45.289747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:45.546222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:45.556241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:45.672638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.814296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.863192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.954478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-10T12:07:45.960162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.008985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.066527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.127013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.213746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.273353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.342472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.419955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703478556420131:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.420149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.420492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703478556420144:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.420534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.420612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703478556420143:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.425549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:48.442962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703478556420147:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:48.510640Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703478556420198:3108] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:48.644209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703457081582010:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:48.644289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:49.329670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.380378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.417586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB ca ... -1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2026-01-10T12:08:45.826428Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request SendMessageBatch working duration: 128ms 2026-01-10T12:08:45.826594Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse SendMessageBatch { RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "2376e7d2-d9016025-d380fded-9553a820" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "cee9aa55-86533e9-22fb8c2f-97c49126" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-10T12:08:45.826732Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7593703721508583777:2505]: SendMessageBatch { RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "2376e7d2-d9016025-d380fded-9553a820" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "cee9aa55-86533e9-22fb8c2f-97c49126" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-10T12:08:45.826828Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7593703721508583783:3757]. Found: 1 2026-01-10T12:08:45.827043Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e] HandleResponse: { SendMessageBatch { RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "2376e7d2-d9016025-d380fded-9553a820" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "cee9aa55-86533e9-22fb8c2f-97c49126" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2026-01-10T12:08:45.827203Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e] Sending reply from proxy actor: { SendMessageBatch { RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "2376e7d2-d9016025-d380fded-9553a820" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "cee9aa55-86533e9-22fb8c2f-97c49126" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2026-01-10T12:08:45.827797Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [SendMessageBatch] requestId [e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e] Got succesfult GRPC response. 2026-01-10T12:08:45.828034Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessageBatch] requestId [e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e] reply ok 2026-01-10T12:08:45.828206Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [SendMessageBatch] requestId [e4106cb-b3ca6cd6-9bfa4c8a-ed69ee5e] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 617 SourceAddress: 5867:1a3e:687b:0:4067:1a3e:687b:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2026-01-10T12:08:45.828403Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:57530) <- (200 , 464 bytes) 2026-01-10T12:08:45.828515Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:57530) connection closed 2026-01-10T12:08:45.829724Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2026-01-10T12:08:45.829775Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 4ms 2026-01-10T12:08:45.829827Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2026-01-10T12:08:45.829839Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2026-01-10T12:08:45.829920Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2026-01-10T12:08:45.829994Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2026-01-10T12:08:45.830291Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TExportToS3Tests::Changefeeds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2026-01-10T12:07:42.860621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703453439680001:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:42.866053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:42.944055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:43.504591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:43.547099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:43.547228Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:43.592547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:43.900260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:43.920301Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703453439679943:2081] 1768046862852870 != 1768046862852873 2026-01-10T12:07:43.928497Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:43.948184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:44.176885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:44.176912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:44.176929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:44.177033Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:44.655277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:45.006751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:45.022886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:45.025457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:45.240619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.490157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.583656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:45.592336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.706992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.784669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.843250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.901495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.940809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.052864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.127293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.861080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703453439680001:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:47.861183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:48.109903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703479209485382:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.110023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.110357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703479209485390:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.110658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703479209485396:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.110740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.115368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:48.128052Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703479209485398:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:48.183423Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703479209485450:3109] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:48.635904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.667621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.707491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB ca ... d] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2026-01-10T12:08:46.010386Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [b723112c-1990472-23673a9d-927e34d] Request proxy started 2026-01-10T12:08:46.010640Z node 7 :SQS DEBUG: service.cpp:761: Request [b723112c-1990472-23673a9d-927e34d] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2026-01-10T12:08:46.010710Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [b723112c-1990472-23673a9d-927e34d] Get configuration duration: 0ms 2026-01-10T12:08:46.010801Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [b723112c-1990472-23673a9d-927e34d] Send get leader node request to sqs service for cloud4/000000000000000301v0 2026-01-10T12:08:46.011096Z node 7 :SQS DEBUG: service.cpp:581: Request [b723112c-1990472-23673a9d-927e34d] Leader node for queue [cloud4/000000000000000301v0] is 7 2026-01-10T12:08:46.011152Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [b723112c-1990472-23673a9d-927e34d] Got leader node for queue response. Node id: 7. Status: 0 2026-01-10T12:08:46.011259Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [b723112c-1990472-23673a9d-927e34d] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "b723112c-1990472-23673a9d-927e34d" 2026-01-10T12:08:46.011338Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [b723112c-1990472-23673a9d-927e34d] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "b723112c-1990472-23673a9d-927e34d" 2026-01-10T12:08:46.011385Z node 7 :SQS DEBUG: action.h:133: Request [b723112c-1990472-23673a9d-927e34d] Request started. 
Actor: [7:7593703728377909409:5077] 2026-01-10T12:08:46.011727Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7593703728377909409:5077] 2026-01-10T12:08:46.011744Z node 7 :SQS DEBUG: service.cpp:754: Request [b723112c-1990472-23673a9d-927e34d] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2026-01-10T12:08:46.012357Z node 7 :SQS TRACE: executor.cpp:286: Request [831d3106-7a068211-318e7b87-8bba7940] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976710853 Step: 1768046926051 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k0\":\"v\"}" } } } } } 2026-01-10T12:08:46.012383Z node 7 :SQS DEBUG: executor.cpp:287: Request [831d3106-7a068211-318e7b87-8bba7940] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 38ms 2026-01-10T12:08:46.012928Z node 7 :SQS TRACE: executor.cpp:325: Request [831d3106-7a068211-318e7b87-8bba7940] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976710853 Step: 1768046926051 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: 
Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k0\":\"v\"}" } } } } } 2026-01-10T12:08:46.013026Z node 7 :SQS TRACE: executor.cpp:327: Request [831d3106-7a068211-318e7b87-8bba7940] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{\"k0\":\"v\"}"} 2026-01-10T12:08:46.013201Z node 7 :SQS DEBUG: executor.cpp:401: Request [831d3106-7a068211-318e7b87-8bba7940] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 40ms 2026-01-10T12:08:46.013249Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [831d3106-7a068211-318e7b87-8bba7940] Sending executed reply 2026-01-10T12:08:46.013644Z node 7 :SQS DEBUG: action.h:627: Request [b723112c-1990472-23673a9d-927e34d] Get configuration duration: 2ms 2026-01-10T12:08:46.013670Z node 7 :SQS TRACE: action.h:647: Request [b723112c-1990472-23673a9d-927e34d] Got configuration. 
Root url: http://ghrun-tjszgfszoq.auto.internal:8771, Shards: 1, Fail: 0 2026-01-10T12:08:46.013699Z node 7 :SQS TRACE: action.h:427: Request [b723112c-1990472-23673a9d-927e34d] DoRoutine 2026-01-10T12:08:46.013791Z node 7 :SQS TRACE: action.h:264: Request [b723112c-1990472-23673a9d-927e34d] SendReplyAndDie from action actor { ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } } 2026-01-10T12:08:46.013911Z node 7 :SQS TRACE: proxy_service.h:35: Request [b723112c-1990472-23673a9d-927e34d] Sending sqs response: { ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } RequestId: "b723112c-1990472-23673a9d-927e34d" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k0" Value: "v" } } 2026-01-10T12:08:46.014068Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } RequestId: "b723112c-1990472-23673a9d-927e34d" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k0" Value: "v" } 2026-01-10T12:08:46.014104Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7593703728377909409:5077]. Found: 1 2026-01-10T12:08:46.014141Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7593703728377909407:2720]: ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } RequestId: "b723112c-1990472-23673a9d-927e34d" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k0" Value: "v" } 2026-01-10T12:08:46.014881Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [b723112c-1990472-23673a9d-927e34d] HandleResponse: { ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } RequestId: "b723112c-1990472-23673a9d-927e34d" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k0" Value: "v" } }, status: OK Http output full {"Tags":{"k0":"v"}} 2026-01-10T12:08:46.014987Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [b723112c-1990472-23673a9d-927e34d] Sending reply from proxy actor: { ListQueueTags { RequestId: "b723112c-1990472-23673a9d-927e34d" Tags { Key: "k0" Value: "v" } } RequestId: "b723112c-1990472-23673a9d-927e34d" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k0" Value: "v" } } 2026-01-10T12:08:46.015235Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [b723112c-1990472-23673a9d-927e34d] Got succesfult GRPC response. 2026-01-10T12:08:46.015312Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [b723112c-1990472-23673a9d-927e34d] reply ok 2026-01-10T12:08:46.015391Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [b723112c-1990472-23673a9d-927e34d] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 170 SourceAddress: 9821:7d2d:947b:0:8021:7d2d:947b:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-01-10T12:08:46.015502Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46764) <- (200 , 19 bytes) 2026-01-10T12:08:46.015622Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46764) connection closed |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> BsControllerConfig::OverlayMap >> BsControllerConfig::OverlayMap [GOOD] >> BsControllerConfig::OverlayMapCrossReferences ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-10T12:08:15.331108Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703593103052620:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.335552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.834122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.861360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.861459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.997445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:16.049435Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:16.062816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:16.189484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:16.189514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:16.189527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:16.189609Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.340375Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.342324Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.342355Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.343344Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24144, port: 24144 2026-01-10T12:08:16.344228Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.356309Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:16.404668Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:16.449344Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****gAHA (359D8AE6) () has now valid token of ldapuser@ldap 2026-01-10T12:08:16.449706Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:19.597921Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703613174072107:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.597968Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:19.700774Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.845425Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.868273Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703613174072081:2081] 1768046899587998 != 1768046899588001 2026-01-10T12:08:19.912162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.915360Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.915442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.924685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:20.125559Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:20.125583Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:20.125591Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:20.125674Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:20.226319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:20.303444Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:20.307408Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db 
, token db /Root, DomainLoginOnly 1 2026-01-10T12:08:20.307457Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:20.308241Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:21101, port: 21101 2026-01-10T12:08:20.308395Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:20.323342Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:20.368385Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:20.369385Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:20.369453Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:20.416702Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:20.460518Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:20.461476Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****bpBQ (F3ABE8C1) () has now valid token of ldapuser@ldap 2026-01-10T12:08:24.411735Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703631634324836:2248];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:24.411806Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:24.455320Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:24.616242Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.636296Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703631634324626:2081] 1768046904367223 != 1768046904367226 2026-01-10T12:08:24.707118Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.712264Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
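The traversal above resolves nested groups level by level: the first search uses the matching-rule-in-chain filter (member:1.2.840.113556.1.4.1941:=uid=ldapuser,...), and each following round ORs together the parent group DNs found on the previous level. A minimal C++ sketch of that second step follows; the helper name is hypothetical and this is not the ldap_auth_provider.cpp code.

#include <string>
#include <vector>

// Illustrative only: rebuilds the "(|(entryDn=...)(entryDn=...))" filter shape seen
// in the traversal above from the parent group DNs found on the previous level.
// A real implementation would also escape special filter characters per RFC 4515.
std::string BuildNextLevelFilter(const std::vector<std::string>& parentGroupDns) {
    std::string filter = "(|";
    for (const std::string& dn : parentGroupDns) {
        filter += "(entryDn=" + dn + ")";
    }
    filter += ")";
    return filter;
}
// e.g. {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
//       "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}
// yields the second-round filter logged above.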
2026-01-10T12:08:24.712773Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:24.731479Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.877511Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.891858Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.891896Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.891903Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.892015Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:25.001154Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:25.004276Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:25.004447Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:25.005154Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22579, port: 22579 2026-01-10T12:08:25.005223Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:25.009514Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:25.058086Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****DXWg (CE0D81C3) () has now valid token of ldapuser@ldap 2026-01-10T12:08:30.140493Z node 4 :KQP_PROXY WARN: kq ... 
594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:30.405663Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:30.446470Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:30.627023Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:30.627047Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:30.627055Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:30.627142Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:30.771342Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:30.773975Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:30.774015Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:30.774841Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:24546 ldap://localhost:24546 ldap://localhost:11111, port: 24546 2026-01-10T12:08:30.775105Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:30.800717Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:30.844625Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:30.845234Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:30.845280Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:30.892559Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:30.940460Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:30.941450Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****rUwQ (AD696E32) () has now valid token of ldapuser@ldap 2026-01-10T12:08:31.080312Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
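The init line above carries three URIs for a single connection attempt (ldap://qqq:24546 ldap://localhost:24546 ldap://localhost:11111), i.e. a host list with failover to the first reachable server. A sketch of how such a list could be assembled and handed to OpenLDAP, assuming ldap_initialize()'s space-separated URI list is the mechanism in play; the helper name and error handling are hypothetical, not YDB code.

#include <ldap.h>
#include <string>
#include <vector>

// Sketch under an assumption: OpenLDAP's ldap_initialize() accepts a space-separated
// URI list and the library tries the hosts in order, which is consistent with the
// single init line above that carries three URIs.
LDAP* InitWithHostList(const std::vector<std::string>& hosts, unsigned short port) {
    std::string uris;
    for (const std::string& host : hosts) {
        if (!uris.empty()) {
            uris += ' ';
        }
        uris += "ldap://" + host + ":" + std::to_string(port);
    }
    LDAP* ld = nullptr;
    if (ldap_initialize(&ld, uris.c_str()) != LDAP_SUCCESS) {
        return nullptr;  // the caller decides whether this is retryable
    }
    return ld;
}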
2026-01-10T12:08:36.695352Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:36.712282Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:37.024229Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:37.074922Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:37.093205Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593703684757240178:2081] 1768046916521072 != 1768046916521075 2026-01-10T12:08:37.095755Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:37.095843Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:37.233443Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:37.496769Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:37.496802Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:37.496810Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:37.496897Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:37.499026Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:37.608281Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:37.803610Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:37.808157Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:37.808190Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:37.808955Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:7303, port: 7303 2026-01-10T12:08:37.809040Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:37.820295Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:37.868048Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:37.868860Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:37.868915Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:37.920378Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:37.968385Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:37.969876Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****_qJQ (A3EC5D79) () has now valid token of ldapuser@ldap 2026-01-10T12:08:43.189670Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703712252516146:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:43.199184Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:43.275799Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:43.469608Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:43.488225Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703712252516097:2081] 1768046923174233 != 1768046923174236 2026-01-10T12:08:43.496909Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:43.497014Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:43.571744Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:43.580094Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:43.760712Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:43.760740Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:43.760748Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:43.760837Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
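Across this log the TICKET_PARSER lines settle into one of three outcomes: a valid token (as above for ldapuser@ldap), a retryable error with a scheduled refresh, or a permanent error. A small illustrative classifier naming that split; these are not YDB's actual types.

// Illustrative only: mirrors the three states logged as "has now valid token",
// "has now retryable error message" and "has now permanent error message".
enum class ETicketOutcome {
    ValidToken,      // e.g. "valid token of ldapuser@ldap"
    RetryableError,  // e.g. "Server is busy" later in this log; a retry is scheduled
    PermanentError,  // e.g. "Bad search filter" or a user that does not exist
};

ETicketOutcome ClassifyLdapResult(bool searchSucceeded, bool transientServerError) {
    if (searchSucceeded) {
        return ETicketOutcome::ValidToken;
    }
    return transientServerError ? ETicketOutcome::RetryableError
                                : ETicketOutcome::PermanentError;
}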
2026-01-10T12:08:43.836278Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:43.837118Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:43.837146Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:43.837923Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17484, port: 17484 2026-01-10T12:08:43.838060Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:43.864311Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-10T12:08:43.864441Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:17484. Bad search filter 2026-01-10T12:08:43.864693Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****y_HQ (1DC547CC) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:17484. Bad search filter)' |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2026-01-10T12:08:15.358065Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703592868429121:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.358135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.810805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.810888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.905816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.964255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703592868428973:2081] 1768046895338369 != 1768046895338372 2026-01-10T12:08:15.991454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.998293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:16.136965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:16.136987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:16.136995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
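The rejected filter &(uid=ldapuser)() above and the GetFilterWithFewLoginPlaceholders test name suggest a utility that substitutes the user login into placeholders in a configured filter template, with a malformed template surfacing as a permanent "Bad search filter" login error rather than a retry. A hedged sketch follows; the "$username" token and the helper name are assumptions for illustration only.

#include <string>

// Replaces every occurrence of the assumed "$username" placeholder in the template
// with the supplied login. Not the actual TLdapUtilsSearchFilterCreator contract.
std::string SubstituteLoginPlaceholders(std::string filterTemplate, const std::string& login) {
    const std::string placeholder = "$username";
    size_t pos = filterTemplate.find(placeholder);
    while (pos != std::string::npos) {
        filterTemplate.replace(pos, placeholder.size(), login);
        pos = filterTemplate.find(placeholder, pos + login.size());
    }
    return filterTemplate;
}
// SubstituteLoginPlaceholders("uid=$username", "ldapuser") -> "uid=ldapuser";
// a malformed template produces a malformed filter, which the provider reports as a
// permanent error instead of scheduling a retry.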
2026-01-10T12:08:16.137086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.162833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:16.365826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:16.394783Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.399173Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.399204Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.400051Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:61086, port: 61086 2026-01-10T12:08:16.400881Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.420743Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:16.468331Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:16.468959Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:16.469054Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.516464Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.564890Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:16.566170Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****-8Cg (F5D1FCA1) () has now valid token of ldapuser@ldap 2026-01-10T12:08:19.364263Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****-8Cg (F5D1FCA1) 2026-01-10T12:08:19.367884Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:61086, port: 61086 2026-01-10T12:08:19.368002Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:19.388246Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:19.388991Z node 1 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:61086 return no entries 2026-01-10T12:08:19.389242Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****-8Cg (F5D1FCA1) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:61086 return no entries)' 2026-01-10T12:08:20.357565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703592868429121:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:20.357657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:22.366199Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****-8Cg (F5D1FCA1) 2026-01-10T12:08:27.600610Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703643813693898:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:27.600716Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:27.876244Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:27.952251Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703643813693672:2081] 1768046907543467 != 1768046907543470 2026-01-10T12:08:27.966568Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:27.973811Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:27.980319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:28.054427Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:28.208371Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:28.312850Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:28.312878Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:28.312886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:28.312969Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:28.584301Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:28.590648Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for 
/Root keys 1 2026-01-10T12:08:28.596662Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:28.596707Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:28.597456Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:31880, port: 31880 2026-01-10T12:08:28.597525Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:28.609510Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:28.609916Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:31880. Server is busy 2026-01-10T12:08:28.610402Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****IRzg (E61F23EC) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:31880. Server is busy)' 2026-01-10T12:08:28.610677Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:28.610697Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:28.611549Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:31880, port: 31880 2026-01-10T12:08:28.611625Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:28.630589Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:28.630986Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:31880. Server is busy 2026-01-10T12:08:28.631221Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****IRzg (E61F23EC) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:31880. Server is busy)' 2026-01-10T12:08:30.588225Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****IRzg (E61F23EC) 2026-01-10T12:08:30.588522Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:30.588545Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:30.667104Z node 2 :LDAP_AUTH_PROVIDER DEBU ... 
/include/__functional/function.h:431:12 #11 0x00001c2c136e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #12 0x00001c2c136e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #13 0x00001c2c18cc in Execute /-S/util/thread/factory.h:15:13 #14 0x00001c2c18cc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #15 0x00001b204d54 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #16 0x00001aea6d46 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 #17 0x7fe4c9a94ac2 (/lib/x86_64-linux-gnu/libc.so.6+0x94ac2) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) #18 0x7fe4c9b268bf (/lib/x86_64-linux-gnu/libc.so.6+0x1268bf) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Address 0x7be4c7f194d8 is located in stack of thread T0 at offset 1240 in frame #0 0x00001ada7eef in NKikimr::LdapRefreshGroupsInfoWithError(NKikimr::(anonymous namespace)::ESecurityConnectionType const&) /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1171 This frame has 61 object(s): [32, 80) 'truth.i1735' [112, 160) 'agg.tmp3.i1736' [192, 208) 'retval.i1657' [224, 236) 'name.i1658' [256, 304) 'truth.i' [336, 384) 'agg.tmp3.i' [416, 432) 'retval.i1097' [448, 460) 'name.i' [480, 832) 'server' (line 1175) [896, 944) 'agg.tmp' [976, 1024) 'responses' (line 1176) [1056, 1104) 'updatedResponses' (line 1177) [1136, 1272) 'ldapServer' (line 1185) <== Memory access at offset 1240 is inside this variable [1344, 1440) 'ref.tmp21' (line 1185) [1472, 1520) 'loginResponse' (line 1187) [1552, 1560) 'agg.tmp37' [1584, 1592) 'handle' (line 1191) [1616, 1640) 'ref.tmp70' (line 1195) [1680, 1704) 'ref.tmp78' (line 1195) [1744, 1752) 'ref.tmp94' (line 1195) [1776, 1800) 'ref.tmp95' (line 1195) [1840, 1864) 'ref.tmp124' (line 1196) [1904, 1928) 'ref.tmp132' (line 1196) [1968, 1976) 'ref.tmp149' (line 1196) [2000, 2024) 'ref.tmp150' (line 1196) [2064, 2072) 'ref.tmp186' (line 1197) [2096, 2120) 'ref.tmp191' (line 1197) [2160, 2184) 'ref.tmp202' (line 1197) [2224, 2248) 'ref.tmp210' (line 1197) [2288, 2312) 'ref.tmp252' (line 1198) [2352, 2376) 'ref.tmp260' (line 1198) [2416, 2424) 'ref.tmp277' (line 1198) [2448, 2472) 'ref.tmp278' (line 1198) [2512, 2520) 'agg.tmp321' [2544, 2568) 'ref.tmp361' (line 1208) [2608, 2632) 'ref.tmp369' (line 1208) [2672, 2680) 'ref.tmp386' (line 1208) [2704, 2728) 'ref.tmp387' (line 1208) [2768, 2792) 'ref.tmp422' (line 1209) [2832, 2856) 'ref.tmp430' (line 1209) [2896, 2904) 'ref.tmp447' (line 1209) [2928, 2952) 'ref.tmp448' (line 1209) [2992, 3000) 'ref.tmp477' (line 1211) [3024, 3032) 'ref.tmp495' (line 1211) [3056, 3080) 'ref.tmp498' (line 1211) [3120, 3128) 'ref.tmp513' (line 1211) [3152, 3160) 'ref.tmp516' (line 1211) [3184, 3208) 'ref.tmp528' (line 1211) [3248, 3272) 'ref.tmp536' (line 1211) [3312, 3336) 'ref.tmp578' (line 1212) [3376, 3384) 'ref.tmp616' (line 1218) [3408, 3432) 'ref.tmp619' (line 1218) [3472, 3480) 'ref.tmp634' (line 1218) [3504, 3512) 'ref.tmp637' (line 1218) [3536, 3560) 'ref.tmp649' (line 1218) [3600, 3624) 'ref.tmp657' (line 1218) [3664, 3688) 'ref.tmp717' (line 1220) [3728, 3752) 'ref.tmp725' (line 1220) [3792, 3800) 'ref.tmp742' (line 1220) [3824, 3848) 'ref.tmp743' (line 1220) [3888, 3896) 'ref.tmp746' (line 1220) HINT: this may be a false positive if your program uses some custom stack unwind mechanism, swapcontext or vfork (longjmp and C++ 
exceptions *are* supported) SUMMARY: AddressSanitizer: stack-use-after-scope /-S/contrib/libs/cxxsupp/libcxx/include/vector:1461:28 in begin Shadow bytes around the buggy address: 0x7be4c7f19200: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0x7be4c7f19280: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0x7be4c7f19300: 00 00 00 00 00 00 00 00 f2 f2 f2 f2 f2 f2 f2 f2 0x7be4c7f19380: 00 00 00 00 00 00 f2 f2 f2 f2 f8 f8 f8 f8 f8 f8 0x7be4c7f19400: f2 f2 f2 f2 f8 f8 f8 f8 f8 f8 f2 f2 f2 f2 f8 f8 =>0x7be4c7f19480: f8 f8 f8 f8 f8 f8 f8 f8 f8 f8 f8[f8]f8 f8 f8 f2 0x7be4c7f19500: f2 f2 f2 f2 f2 f2 f2 f2 f8 f8 f8 f8 f8 f8 f8 f8 0x7be4c7f19580: f8 f8 f8 f8 f2 f2 f2 f2 f8 f8 f8 f8 f8 f8 f2 f2 0x7be4c7f19600: f2 f2 00 f2 f2 f2 f8 f2 f2 f2 f8 f8 f8 f2 f2 f2 0x7be4c7f19680: f2 f2 f8 f8 f8 f2 f2 f2 f2 f2 f8 f2 f2 f2 f8 f8 0x7be4c7f19700: f8 f2 f2 f2 f2 f2 f8 f8 f8 f2 f2 f2 f2 f2 f8 f8 Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb Thread T129 (ydb-core-securi) created by T122 (ydb-core-securi) here: #0 0x00001ae8d981 in pthread_create /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:250:3 #1 0x00001b1f59a5 in Start /-S/util/system/thread.cpp:230:27 #2 0x00001b1f59a5 in TThread::Start() /-S/util/system/thread.cpp:315:34 #3 0x00001c2bb41a in Run /-S/util/thread/factory.h:36:13 #4 0x00001c2bb41a in IThreadFactory::Run(std::__y1::function const&) /-S/util/thread/factory.cpp:72:10 #5 0x00004df89a70 in operator() /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_simple_server.cpp:57:44 #6 0x00004df89a70 in __invoke<(lambda at /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_simple_server.cpp:41:38) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #7 0x00004df89a70 in __call<(lambda at /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_simple_server.cpp:41:38) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #8 0x00004df89a70 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #9 0x00004df89a70 in std::__y1::__function::__func const&, bool)::$_0, std::__y1::allocator const&, bool)::$_0>, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #10 0x00001c2c136e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #11 0x00001c2c136e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #12 0x00001c2c136e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #13 0x00001c2c5ab9 in Execute /-S/util/thread/factory.h:15:13 #14 0x00001c2c5ab9 in (anonymous namespace)::TPoolThread::TThreadImpl::Process(void*) /-S/util/thread/pool.cpp:725:32 #15 0x00001c2c51c5 in TAdaptiveThreadPool::TImpl::TThread::DoExecute() /-S/util/thread/pool.cpp:418:38 #16 0x00001c2c18cc in Execute /-S/util/thread/factory.h:15:13 #17 0x00001c2c18cc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #18 0x00001b204d54 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #19 
0x00001aea6d46 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 Thread T122 (ydb-core-securi) created by T0 here: #0 0x00001ae8d981 in pthread_create /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:250:3 #1 0x00001b1f59a5 in Start /-S/util/system/thread.cpp:230:27 #2 0x00001b1f59a5 in TThread::Start() /-S/util/system/thread.cpp:315:34 #3 0x00001c2be7fe in Run /-S/util/thread/factory.h:36:13 #4 0x00001c2be7fe in Run /-S/util/thread/factory.h:53:14 #5 0x00001c2be7fe in TThread /-S/util/thread/pool.cpp:394:47 #6 0x00001c2be7fe in AddThreadNoLock /-S/util/thread/pool.cpp:507:17 #7 0x00001c2be7fe in TAdaptiveThreadPool::TImpl::Add(IObjectInQueue*) /-S/util/thread/pool.cpp:469:17 #8 0x00001c2be2f6 in TAdaptiveThreadPool::Add(IObjectInQueue*) /-S/util/thread/pool.cpp:587:12 #9 0x00001c2c0272 in IThreadPool::SafeAdd(IObjectInQueue*) /-S/util/thread/pool.cpp:679:5 #10 0x00001c2c5788 in (anonymous namespace)::TPoolThread::DoRun(IThreadFactory::IThreadAble*) /-S/util/thread/pool.cpp:758:22 #11 0x00001c2bb41a in Run /-S/util/thread/factory.h:36:13 #12 0x00001c2bb41a in IThreadFactory::Run(std::__y1::function const&) /-S/util/thread/factory.cpp:72:10 #13 0x00004df86cbd in LdapMock::TLdapSimpleServer::TLdapSimpleServer(unsigned short, std::__y1::pair const&, bool) /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_simple_server.cpp:41:34 #14 0x00001ada874c in NKikimr::LdapRefreshGroupsInfoWithError(NKikimr::(anonymous namespace)::ESecurityConnectionType const&) /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1185:37 #15 0x00001adb0717 in NKikimr::NTestSuiteLdapAuthProviderTest_nonSecure::TTestCaseLdapRefreshGroupsInfoWithError::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1501:9 #16 0x00001adf16c7 in operator() /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1443:1 #17 0x00001adf16c7 in __invoke<(lambda at /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1443:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #18 0x00001adf16c7 in __call<(lambda at /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1443:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #19 0x00001adf16c7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #20 0x00001adf16c7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #21 0x00001b6bbec9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #22 0x00001b6bbec9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #23 0x00001b6bbec9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #24 0x00001b689f17 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #25 0x00001adf0a03 in NKikimr::NTestSuiteLdapAuthProviderTest_nonSecure::TCurrentTest::Execute() /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1443:1 #26 0x00001b68b6cf in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #27 0x00001b6b5d2c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #28 0x7fe4c9a29d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 
4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) ==127744==ABORTING |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2026-01-10T12:07:43.864006Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703455977905928:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:43.865007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:44.401025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:44.463056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:44.463172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:44.595101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:44.763923Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:44.783297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703455977905822:2081] 1768046863821315 != 1768046863821318 2026-01-10T12:07:44.805072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:44.875243Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:45.004733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:45.004754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:45.004760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:45.004832Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:45.441949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:45.717741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:45.723755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:45.725668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:45.762405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:07:45.860283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.010358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.082655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.164732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-10T12:07:46.174156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.258294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.327002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.389693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.435794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.518403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.562069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.819588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477452743962:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.819716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.820112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477452743975:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.820151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477452743974:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.820197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.827715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:48.845841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703455977905928:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:48.845919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:48.847728Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703477452743978:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:48.940014Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703477452744031:3109] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:49.363207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.394490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.427215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... l { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.721745Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 251ms 2026-01-10T12:08:47.722251Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } 
} } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.722290Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:08:47.722433Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 251ms 2026-01-10T12:08:47.723062Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.733402Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.733441Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 269ms 2026-01-10T12:08:47.733733Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS 
ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.733774Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:08:47.733871Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 269ms 2026-01-10T12:08:47.734262Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.744489Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703731426660589:2437]: Pool not found 2026-01-10T12:08:47.745525Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:08:48.155696Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703731426660583:2432]: Pool not found 2026-01-10T12:08:48.156245Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:08:48.160507Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7593703735721628007:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:48.160689Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7593703735721628008:2459], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:08:48.160771Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:48.164456Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7593703735721628011:2460], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:48.164560Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:48.380583Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:48054) incoming connection opened 2026-01-10T12:08:48.380683Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:48054) -> (POST /, 87 bytes) 2026-01-10T12:08:48.380858Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [781f:2f12:ff7b:0:601f:2f12:ff7b:0] request [CreateStream] url [/] database [] requestId: 1b6bd23a-31963fbb-5d22de56-fa1fe67f 2026-01-10T12:08:48.381483Z node 8 :HTTP_PROXY WARN: http_req.cpp:971: http request [CreateStream] requestId [1b6bd23a-31963fbb-5d22de56-fa1fe67f] got new request with incorrect json from [781f:2f12:ff7b:0:601f:2f12:ff7b:0] database '' 2026-01-10T12:08:48.381711Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [1b6bd23a-31963fbb-5d22de56-fa1fe67f] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2026-01-10T12:08:48.381940Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:48054) <- (400 InvalidArgumentException, 135 bytes) 2026-01-10T12:08:48.382052Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:48054) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2026-01-10T12:08:48.382095Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:48054) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 1b6bd23a-31963fbb-5d22de56-fa1fe67f Content-Type: application/x-amz-json-1.1 Content-Length: 135 2026-01-10T12:08:48.382203Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:48054) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2026-01-10T12:07:41.384480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703446599615031:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:41.384531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:41.489571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:42.045402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:07:42.060152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:42.060279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:42.076638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:42.220440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:42.239678Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703446599615006:2081] 1768046861382550 != 1768046861382553 2026-01-10T12:07:42.395427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:42.450276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:42.462324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:42.462356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:42.462376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:42.462531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:42.834372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:42.841988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:43.110779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:43.118167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:43.121607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:43.294138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.421194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.506839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:43.517210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.629474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.694754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.742839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.796113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.882362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.952497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.008447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.388209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703446599615031:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:46.388286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:46.550650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703468074453147:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.550801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.551168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703468074453159:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.551237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703468074453160:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.551371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:46.555379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:46.568228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703468074453163:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:46.667212Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703468074453214:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:47.318505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.382277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.429819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation ... \005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?l\003?n\022FROM_NAME\003\022\000\000\013?4\003?6\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\210\003?\212\024BATCH_SIZE\003\022\000\003?:\000\003?<\000\006\004?@\003\203\014\000\003\203\014\000\003\003?B\000\277\007\003? \000\002\001\000\006\002?\002\t\211\004\202\203\005@?\034\"\000\003?\262\022truncated\t\211\004?\034?\036\203\004\036\000?\250\003?\270\002\002\001\000\003/" } } 2026-01-10T12:08:47.287882Z node 8 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). 
Mode: COMPILE_AND_EXEC 2026-01-10T12:08:47.288309Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.288478Z node 8 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Serializing params: {"FROM_USER": "", "FROM_NAME": "", "BATCH_SIZE": 1000} 2026-01-10T12:08:47.288765Z node 8 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\022\nFlags\010Name\010Args\016Payload\022Parameter\016Account\nValue\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\006\207\203\001H\207\203\001H\207\203\001H\026\006\032\"\000\003?\006\020settings\t\211\004?\026\205\004?\026\203\014\020List$Truncated\203\004\036\000\t\211\026?\036\203\005\004\200\205\006\203\004\203\004\203\004\026\006\032\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?$ \000\001\205\000\000\000\000\0011\000\000\000\000\000\000\000?\024\005?,\003?&\002\003?(\004\003?*\006\007\013?2\t\351\000?.\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?V\003?X\022FROM_USER\003\022\000\t\351\000?0\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?l\003?n\022FROM_NAME\003\022\000\000\013?4\003?6\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\210\003?\212\024BATCH_SIZE\003\022\000\003?:\000\003?<\000\006\004?@\003\203\014\000\003\203\014\000\003\003?B\000\277\007\003? 
\000\002\001\000\006\002?\002\t\211\004\202\203\005@?\034\"\000\003?\262\022truncated\t\211\004?\034?\036\203\004\036\000?\250\003?\270\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE$FROM_NAME$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. Params: {"FROM_USER": "", "FROM_NAME": "", "BATCH_SIZE": 1000} 2026-01-10T12:08:47.316933Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.316971Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 27ms 2026-01-10T12:08:47.317213Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.317248Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:08:47.317670Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 29ms 2026-01-10T12:08:47.318006Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: 
Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:08:47.489327Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703733530388178:2439]: Pool not found 2026-01-10T12:08:47.489543Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:08:47.950541Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703733530388176:2438]: Pool not found 2026-01-10T12:08:47.951582Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:08:47.956673Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7593703733530388309:2459], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:47.956798Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7593703733530388310:2460], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:08:47.956869Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:47.961814Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7593703733530388314:2461], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:47.961922Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:48.234859Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:39408) incoming connection opened 2026-01-10T12:08:48.234948Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:39408) -> (POST /Root, 4 bytes) 2026-01-10T12:08:48.235125Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b8de:d14:7a7b:0:a0de:d14:7a7b:0] request [CreateStream] url [/Root] database [/Root] requestId: b3f79559-83b3f3d5-d0cd660d-8bb6491a 2026-01-10T12:08:48.235715Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [b3f79559-83b3f3d5-d0cd660d-8bb6491a] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2026-01-10T12:08:48.235990Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:39408) <- (400 MissingParameter, 127 bytes) 2026-01-10T12:08:48.236041Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:39408) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2026-01-10T12:08:48.236097Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:39408) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: b3f79559-83b3f3d5-d0cd660d-8bb6491a Content-Type: application/x-amz-json-1.1 Content-Length: 127 2026-01-10T12:08:48.236225Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:39408) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2026-01-10T12:08:48.434331Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703733530388307:2458]: Pool not found 2026-01-10T12:08:48.435298Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:21.525123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:21.525224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:21.525263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:21.525317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:21.525352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:21.525382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:21.525438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:21.525533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:21.526383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:21.526717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:21.620101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:21.620192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.640350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:21.652407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:21.652637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:21.660571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:21.660805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:21.661532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:21.662026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:21.665594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:21.665814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:21.667087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:21.667150Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:21.667276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:21.667325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:21.667388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:21.667609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:21.836358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.837948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:21.838541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:08:49.680298Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-10T12:08:49.680432Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:08:49.681146Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:08:49.681256Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:08:49.681298Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:08:49.681349Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-10T12:08:49.681393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-10T12:08:49.681505Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-10T12:08:49.681576Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2153] 2026-01-10T12:08:49.692070Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:08:49.692831Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
281474976710761 2026-01-10T12:08:49.692961Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:08:49.693043Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:08:49.693134Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:08:49.693180Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-10T12:08:49.693238Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-10T12:08:49.697237Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:08:49.697389Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:08:49.697479Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1536:3328] TestWaitNotification: OK eventTxId 105 2026-01-10T12:08:49.699041Z node 6 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-10T12:08:49.706260Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:08:49.706352Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 105 2026-01-10T12:08:49.706450Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-10T12:08:49.706549Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:08:49.706686Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2026-01-10T12:08:49.706774Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:08:49.706837Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2026-01-10T12:08:49.706942Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-10T12:08:49.707102Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:08:49.710988Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir 
Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-10T12:08:49.711204Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-10T12:08:49.711424Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:08:49.716829Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:08:49.717182Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-105 2026-01-10T12:08:49.717448Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-10T12:08:49.717587Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-10T12:08:49.717702Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:08:49.717795Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-10T12:08:49.717905Z node 6 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-10T12:08:49.718065Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' 
State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-10T12:08:49.720506Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:08:49.720837Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:08:49.721232Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:08:49.721414Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:08:49.721507Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:08:49.721566Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-10T12:08:49.721644Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-10T12:08:49.724709Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2026-01-10T12:08:49.725018Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:08:49.725098Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:08:49.725734Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:08:49.725882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:08:49.725954Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1756:3544] TestWaitNotification: OK eventTxId 105 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2026-01-10T12:07:43.721261Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703455526178287:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:43.721295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:44.237190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:44.241176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:44.241290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:44.246523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:44.483785Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:44.488737Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703455526178266:2081] 1768046863719889 != 1768046863719892 2026-01-10T12:07:44.647240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:44.708459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:44.708476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:44.708487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:44.708598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:44.796290Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:45.082547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:45.089467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:45.345746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:45.356990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:45.359969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:45.399373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:07:45.523947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.763980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.814165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.870039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.913838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:45.972206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.026321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.080369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.141096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.190245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.424555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477001016404:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.424728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477001016412:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.424832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.425393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703477001016419:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.425443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:48.429580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:48.452544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703477001016418:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:48.536698Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703477001016471:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:48.722146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703455526178287:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:48.722207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:49.038950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.086904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:49.127948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... 
ot new request from [389c:d33e:4d7c:0:209c:d33e:4d7c:0] database '/Root' stream 'testtopic' 2026-01-10T12:08:50.913455Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStream] requestId [dfc08c6-9168918d-9fdd2f64-1d3c9d96] [auth] Authorized successfully 2026-01-10T12:08:50.913546Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStream] requestId [dfc08c6-9168918d-9fdd2f64-1d3c9d96] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:08:50.916201Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:08:50.916241Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.916260Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.916286Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.916295Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7593703744368843586:2494], now have 1 active actors on pipe 2026-01-10T12:08:50.916307Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-10T12:08:50.916354Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037909] server connected, pipe [8:7593703744368843588:2496], now have 1 active actors on pipe 2026-01-10T12:08:50.916391Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037908] server connected, pipe [8:7593703744368843587:2495], now have 1 active actors on pipe 2026-01-10T12:08:50.916394Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037910] server connected, pipe [8:7593703744368843589:2497], now have 1 active actors on pipe 2026-01-10T12:08:50.916435Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7593703744368843590:2498], now have 1 active actors on pipe Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1768046931,"StorageLimitMb":0,"StreamName":"testtopic"}} 2026-01-10T12:08:50.918238Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [dfc08c6-9168918d-9fdd2f64-1d3c9d96] reply ok 2026-01-10T12:08:50.918902Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:55012) <- (200 , 1672 bytes) 2026-01-10T12:08:50.919017Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:55012) connection closed 2026-01-10T12:08:50.919784Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7593703744368843586:2494] destroyed 2026-01-10T12:08:50.919819Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037908] server disconnected, pipe [8:7593703744368843587:2495] destroyed 2026-01-10T12:08:50.919843Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037909] server disconnected, pipe [8:7593703744368843588:2496] destroyed 2026-01-10T12:08:50.919865Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037910] server disconnected, pipe [8:7593703744368843589:2497] destroyed 2026-01-10T12:08:50.919889Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7593703744368843590:2498] destroyed 2026-01-10T12:08:50.927664Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:08:50.927708Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.927725Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.927751Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.927767Z node 8 :PERSQUEUE 
DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-10T12:08:50.935559Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:08:50.935600Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.935615Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.935639Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.935654Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-10T12:08:50.941281Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:50.941320Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.941336Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.941360Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.941379Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-10T12:08:50.953426Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:08:50.953462Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.953477Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.953496Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.953511Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-10T12:08:51.020398Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:08:51.020440Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.020458Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:51.020481Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.020497Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-10T12:08:51.026568Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:08:51.026606Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.026622Z node 8 :PERSQUEUE DEBUG: 
partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:51.026646Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.026660Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-10T12:08:51.036258Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:08:51.036298Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.036316Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:51.036338Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.036354Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-10T12:08:51.040459Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:51.040494Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.040511Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:51.040531Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.040545Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-10T12:08:51.056390Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:08:51.056430Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.056445Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:51.056467Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:51.056480Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2026-01-10T12:07:42.134538Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703450646644813:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:42.135332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:42.208630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:42.688821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:42.689020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:42.692609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:42.693020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:42.801280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:42.816200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703450646644780:2081] 1768046862094126 != 1768046862094129 2026-01-10T12:07:42.997731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:43.024556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:43.024577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:43.024585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:43.024686Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:43.163525Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:43.435527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:43.755276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:43.762060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:43.763880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:43.916637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.060226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.144022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:44.150582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.229641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-10T12:07:44.236755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.285888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.360385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.411642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.465845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.526608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.570958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.128464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472121482920:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.128621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.129164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472121482932:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.129292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472121482933:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.129673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.134769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:47.134995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703450646644813:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:47.135070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:47.148053Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703472121482936:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:47.233856Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703472121482990:3109] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:47.678091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.759447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.816265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation ... 50.841924Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [ListShards] requestId [a9660f5-4f1e3f09-73897799-7fe63f74] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:08:50.843218Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7593703742615687558:2485], now have 1 active actors on pipe 2026-01-10T12:08:50.843261Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7593703742615687559:2486], now have 1 active actors on pipe 2026-01-10T12:08:50.844231Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListShards] requestId [a9660f5-4f1e3f09-73897799-7fe63f74] reply ok 2026-01-10T12:08:50.844414Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:35434) <- (200 , 449 bytes) 2026-01-10T12:08:50.844497Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:35434) connection closed Http output full {"NextToken":"CJvXvL66MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CJvXvL66MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-01-10T12:08:50.845625Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7593703742615687559:2486] destroyed 2026-01-10T12:08:50.846526Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:35444) incoming connection opened 2026-01-10T12:08:50.846591Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:35444) -> (POST /Root, 157 bytes) 
2026-01-10T12:08:50.846816Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f8c7:5f11:727b:0:e0c7:5f11:727b:0] request [ListShards] url [/Root] database [/Root] requestId: 5f65def2-6f96e5f0-7296dc1c-93c20e53 2026-01-10T12:08:50.847267Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [ListShards] requestId [5f65def2-6f96e5f0-7296dc1c-93c20e53] got new request from [f8c7:5f11:727b:0:e0c7:5f11:727b:0] database '/Root' stream 'teststream' 2026-01-10T12:08:50.848660Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7593703742615687558:2485] destroyed E0000 00:00:1768046930.849179 136233 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:08:50.849022Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ListShards] requestId [5f65def2-6f96e5f0-7296dc1c-93c20e53] [auth] Authorized successfully 2026-01-10T12:08:50.849104Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [ListShards] requestId [5f65def2-6f96e5f0-7296dc1c-93c20e53] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:08:50.850286Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7593703742615687571:2491], now have 1 active actors on pipe 2026-01-10T12:08:50.850339Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7593703742615687570:2490], now have 1 active actors on pipe Http output full {"NextToken":"CKLXvL66MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CKLXvL66MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-01-10T12:08:50.851239Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListShards] requestId [5f65def2-6f96e5f0-7296dc1c-93c20e53] reply ok 2026-01-10T12:08:50.851559Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:35444) <- (200 , 449 bytes) 2026-01-10T12:08:50.851646Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:35444) connection closed 2026-01-10T12:08:50.853369Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7593703742615687570:2490] destroyed 2026-01-10T12:08:50.853407Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7593703742615687571:2491] destroyed 2026-01-10T12:08:50.880229Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:08:50.880261Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-10T12:08:50.880275Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.880294Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.880308Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-10T12:08:50.885006Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:08:50.885035Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.885047Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.885065Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.885077Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-10T12:08:50.888205Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:08:50.888231Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.888244Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.888260Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.888271Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-10T12:08:50.904197Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:08:50.904231Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.904242Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.904260Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.904270Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-10T12:08:50.904332Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:08:50.904342Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.904351Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.904361Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.904370Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 
2026-01-10T12:08:50.968291Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7593703742615687238:2458]: Pool not found 2026-01-10T12:08:50.970950Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:08:50.984215Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:08:50.984254Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.984272Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.984296Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.984311Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-10T12:08:50.988277Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:08:50.988306Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.988322Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.988343Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.988357Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-10T12:08:50.996431Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:08:50.996470Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.996486Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:08:50.996514Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:08:50.996531Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2026-01-10T12:07:41.738891Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703446644798177:2165];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:41.739027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:42.235600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2026-01-10T12:07:42.314694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:42.314810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:42.366181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:42.521198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:42.532221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703446644798032:2081] 1768046861670328 != 1768046861670331 2026-01-10T12:07:42.552142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:42.624714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:42.624741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:42.624763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:42.624839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:42.762586Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:42.947580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:42.955234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:07:43.347885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:07:43.364580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:07:43.368009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:07:43.651951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:43.934309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.016468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:07:44.021662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.137123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.214350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.283054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.387188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.488936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.568412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:44.646900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:46.736196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703446644798177:2165];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:46.736302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:47.220041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472414603467:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.220252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.220782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472414603479:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.220836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703472414603480:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.220897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:47.226055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:47.244535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703472414603483:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:07:47.332587Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703472414603534:3107] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:47.932510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:47.989730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:48.032852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... 03\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\'\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] Test command err: 2026-01-10T12:08:15.505435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703595504759848:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.505489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:16.012612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:16.012739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:16.097395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:16.107176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:16.153824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:16.270017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:16.270057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:16.270065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:16.270211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.309815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:16.378740Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.382214Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.382237Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.383772Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:2828, port: 2828 2026-01-10T12:08:16.383881Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:16.392786Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-01-10T12:08:16.393446Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****JFLQ (FAEDE7B2) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2026-01-10T12:08:16.393852Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.393880Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.394868Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:2828, port: 2828 2026-01-10T12:08:16.394969Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:16.399150Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. 
Can't contact LDAP server 2026-01-10T12:08:16.399311Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****JFLQ (FAEDE7B2) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2026-01-10T12:08:16.500250Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:19.524275Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703611260401768:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.524558Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:19.556217Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.721501Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.721618Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.725800Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.727650Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703611260401643:2081] 1768046899501660 != 1768046899501663 2026-01-10T12:08:19.748666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.792181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.968697Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.968723Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.968730Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.968808Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:20.040224Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:20.041396Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:20.041420Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:20.042206Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****-L8g (C5D44D7C) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2026-01-10T12:08:23.967326Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703628228176269:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:23.967369Z 
node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:24.145235Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.240046Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:24.240261Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703628228176145:2081] 1768046903922870 != 1768046903922873 2026-01-10T12:08:24.272936Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.273028Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.301149Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.491966Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.519696Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.519745Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.519755Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.519839Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.660235Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.664676Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:24.664698Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.665536Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****DXBQ (DD39034B) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2026-01-10T12:08:29.888117Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703654411747310:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:29.889432Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:29.906995Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:30.047262Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-10T12:08:30.050013Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:30.050098Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:30.080251Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703654411747208:2081] 1768046909884138 != 1768046909884141 2026-01-10T12:08:30.088541Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:30.106524Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:30.232899Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:30.336702Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is ... 6658146131:7762515]; 2026-01-10T12:08:35.388332Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:35.621382Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.923828Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.931971Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:35.948280Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593703679299172408:2081] 1768046915303373 != 1768046915303376 2026-01-10T12:08:36.055965Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:36.056068Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:36.105443Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:36.506449Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:36.507080Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:36.507094Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:36.507103Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:36.507200Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:36.512182Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:36.518877Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:36.526203Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:36.526246Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:36.527336Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****XjMw (D27600FD) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2026-01-10T12:08:42.717149Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703711158948374:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:42.717361Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:42.731544Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:42.970974Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:42.988247Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703711158948204:2081] 1768046922685924 != 1768046922685927 2026-01-10T12:08:42.990088Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:42.990258Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:42.993577Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:43.112003Z node 6 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:08:43.112032Z node 6 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:08:43.132071Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:43.179485Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:43.179508Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:43.179515Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:43.179606Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:43.204211Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:43.362647Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, 
request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:43.362682Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:43.362772Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****-hBA (0F4A0707)) 2026-01-10T12:08:43.720662Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:44.364337Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:44.364365Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root 2026-01-10T12:08:44.364670Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:44.364693Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:44.365677Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:10571, port: 10571 2026-01-10T12:08:44.365787Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:44.445453Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:44.488739Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:44.489825Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:44.489888Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:44.536703Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:44.580585Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:44.581989Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****-hBA (0F4A0707) () has now valid token of ldapuser@ldap 2026-01-10T12:08:48.161281Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:48.170628Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:48.311957Z 
node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:48.322803Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593703736053153116:2081] 1768046928031592 != 1768046928031595 2026-01-10T12:08:48.334318Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:48.334421Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:48.370788Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:48.488247Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:48.502462Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:48.502490Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:48.502500Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:48.502641Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:48.573289Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:48.573323Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:48.573377Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****Fzsw (DB0288EA)) 2026-01-10T12:08:49.140621Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:51.152295Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Fzsw (DB0288EA) () has now permanent error message 'Login state is not available' 2026-01-10T12:08:51.152410Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root, ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-10T12:08:15.268954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703592345705726:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.269706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.703728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.715437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-10T12:08:15.715521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.782335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:15.935847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.937577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.937732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703592345705691:2081] 1768046895261847 != 1768046895261850 2026-01-10T12:08:16.171914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:16.171960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:16.171967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:16.172050Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.282674Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:16.346227Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.352283Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.352315Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.353915Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13891, port: 13891 2026-01-10T12:08:16.354007Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.420608Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:16.464417Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:16.509771Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****FqIw (9EA58B95) () has now valid token of ldapuser@ldap 2026-01-10T12:08:16.512055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.547655Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:19.548511Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703613229157527:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.549025Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:19.804169Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.807072Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.817970Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703613229157291:2081] 1768046899464360 != 1768046899464363 2026-01-10T12:08:19.825684Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.825806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.892704Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:20.063184Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:20.074030Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:20.074049Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:20.074056Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:20.074140Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:20.263159Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:20.267052Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:20.267087Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:20.267849Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:6398, port: 6398 2026-01-10T12:08:20.267967Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:20.340750Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:20.385199Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****r_1Q (E271092B) () has now valid token of ldapuser@ldap 2026-01-10T12:08:20.474735Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:24.099224Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703630895821617:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:24.099359Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:24.188206Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:24.332209Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:24.336220Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703630895821474:2081] 1768046904089455 != 1768046904089458 2026-01-10T12:08:24.360217Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.360580Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.360649Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.392595Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.615867Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.624629Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.624655Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.624665Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.624743Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.776308Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.779029Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:24.779063Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.779713Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:3318, port: 3318 2026-01-10T12:08:24.779770Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:24.849038Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:24.894914Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), 
attributes: 1.1 2026-01-10T12:08:24.896513Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:24.896579Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.944515Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entry ... 5547 2026-01-10T12:08:37.216180Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:37.280736Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:37.280761Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:37.280769Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:37.280851Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:37.503240Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:37.512022Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:37.512069Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:37.512851Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://qqq:8284 ldaps://localhost:8284 ldaps://localhost:11111, port: 8284 2026-01-10T12:08:37.512949Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:37.620776Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:37.672464Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:37.673395Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:37.673459Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:37.720597Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:37.770746Z node 5 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:37.772318Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ojtQ (0E8736B2) () has now valid token of ldapuser@ldap 2026-01-10T12:08:37.772547Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:42.416230Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:42.416437Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:42.519748Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703711969971580:2081] 1768046922246046 != 1768046922246049 2026-01-10T12:08:42.531762Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:42.543344Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:42.543454Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:42.575582Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:42.699775Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:42.816914Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:42.816937Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:42.816945Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:42.817036Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:43.040237Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:43.043955Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:43.043996Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:43.044838Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:4244, port: 4244 2026-01-10T12:08:43.044938Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:43.124845Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:43.179765Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:43.180772Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:43.180824Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:43.227118Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:43.272751Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:43.274159Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****cWpA (99BBD462) () has now valid token of ldapuser@ldap 2026-01-10T12:08:43.310873Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:47.777277Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593703730680815498:2180];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:47.777603Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:47.863705Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:48.091340Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:48.111191Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:48.111290Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:48.115705Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593703730680815356:2081] 1768046927750858 != 1768046927750861 2026-01-10T12:08:48.133506Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:48.145358Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:48.461124Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:48.461150Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:48.461159Z node 7 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:48.461241Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:48.623695Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:48.669578Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:48.670093Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:48.670133Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:48.670933Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:18505, port: 18505 2026-01-10T12:08:48.671898Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:48.743668Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-10T12:08:48.743770Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:18505. Bad search filter 2026-01-10T12:08:48.744361Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****6-IA (E15229D4) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:18505. Bad search filter)' 2026-01-10T12:08:48.804230Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TSchemeShardMoveTest::TwoTables >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::ResetCachedPath |85.4%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> TSchemeShardMoveTest::MoveIndex |85.4%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::Boot |85.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::MoveMigratedTable |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:56.563025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:56.563110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:56.563162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:56.563209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:56.563246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:56.563289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:56.563346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:56.563405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:56.564294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-10T12:08:56.564604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:56.660164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:56.660241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:56.675377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:56.681077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:56.681278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:56.689635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:56.689885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:56.690640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:56.691108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:56.694810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:56.695011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:56.696341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:56.696515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:56.696642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:56.696691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:56.696746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:56.697019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.704759Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:08:56.897043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:56.897369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.897606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:56.897658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:56.899386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:56.899507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:56.907531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:56.907808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:56.908058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.908165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:56.908221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:56.908267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:56.913553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.913648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:56.913700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:56.917491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.917570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:56.917650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:56.917711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:56.932789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:56.935212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:56.935412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:56.936723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:56.936889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:56.936963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:56.937275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:56.937334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:56.937554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:56.937671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:56.940099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:56.940161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
17446Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:08:57.617707Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 227us result status StatusPathDoesNotExist 2026-01-10T12:08:57.617901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:08:57.618400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:08:57.618607Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 208us result status StatusSuccess 2026-01-10T12:08:57.619068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:57.620046Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:08:57.620240Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 193us result status StatusPathDoesNotExist 2026-01-10T12:08:57.620373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:08:57.620896Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:08:57.621170Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 237us result status StatusSuccess 2026-01-10T12:08:57.621566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } 
ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:57.622302Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:08:57.622478Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2026-01-10T12:08:57.623006Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 
ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-10T12:08:15.340731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703592346991080:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.340798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.696197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.708336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.708437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.732296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:15.939053Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:16.016157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:16.180690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:16.180714Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:16.180722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:16.180788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.360571Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:16.369583Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.374549Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.374580Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.376195Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:1849, port: 1849 2026-01-10T12:08:16.376301Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.460600Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:16.517220Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****UsZg (4F344793) () has now valid token of ldapuser@ldap 2026-01-10T12:08:19.477126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:19.480263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.617357Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.617477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.627718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.644306Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703612473104320:2081] 1768046899329948 != 1768046899329951 2026-01-10T12:08:19.662065Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.687227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.824041Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.824071Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.824095Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.824169Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:08:20.068228Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:20.072171Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:20.072205Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:20.072996Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14007, port: 14007 2026-01-10T12:08:20.073099Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:20.144667Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:14007. Invalid credentials 2026-01-10T12:08:20.145265Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****x4Kg (631A6B78) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:14007. Invalid credentials)' 2026-01-10T12:08:23.714498Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703628935668271:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:23.714779Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:23.833008Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.961384Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:23.966592Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703628935668147:2081] 1768046903692705 != 1768046903692708 2026-01-10T12:08:24.002621Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.002719Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.047975Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.048250Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.260756Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.260791Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.260799Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.260888Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-10T12:08:24.420229Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.421863Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:24.421889Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.422555Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:3307, port: 3307 2026-01-10T12:08:24.422637Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:24.500507Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:3307. Invalid credentials 2026-01-10T12:08:24.500966Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****K8NA (FB0CED67) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:3307. Invalid credentials)' 2026-01-10T12:08:28.921766Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703650836376675:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:28.921897Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:28.995914Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:29.221370Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.224476Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703650836376531:2081] 1768046908903348 != 1768046908903351 2026-01-10T12:08:29.260217Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:29.265673Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.265761Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.329847Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.532630Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: P ... 
: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:34.320216Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:34.328032Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:34.328138Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:34.348355Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593703671651131670:2081] 1768046913981621 != 1768046913981624 2026-01-10T12:08:34.364545Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:34.373307Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:34.575020Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:34.748659Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:34.748686Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:34.748694Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:34.748784Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:35.024227Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:35.031416Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:35.031454Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:35.032374Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:35.032433Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:22295, port: 22295 2026-01-10T12:08:35.032509Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:35.100708Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:35.152521Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:35.153730Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:35.153785Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, 
filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:35.200486Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:35.244585Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:35.246057Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****n37Q (FF4A1E54) () has now valid token of ldapuser@ldap 2026-01-10T12:08:39.004204Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593703671651131759:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:39.004282Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:39.048215Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****n37Q (FF4A1E54) 2026-01-10T12:08:39.049169Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:22295, port: 22295 2026-01-10T12:08:39.049254Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:39.104613Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:39.153814Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:39.154943Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:39.154989Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:39.204388Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:39.248539Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:39.249725Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****n37Q (FF4A1E54) () has now valid token of ldapuser@ldap 2026-01-10T12:08:43.052296Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****n37Q (FF4A1E54) 2026-01-10T12:08:46.143718Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703728163882203:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:46.143918Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:46.192225Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:46.365748Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:46.367176Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:46.367273Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:46.372266Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703728163882077:2081] 1768046926133668 != 1768046926133671 2026-01-10T12:08:46.388905Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:46.415225Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:46.672958Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:46.672983Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:46.672992Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:46.673085Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:46.762901Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:46.964241Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:46.967737Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:46.967770Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:46.968697Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:15761, port: 15761 2026-01-10T12:08:46.968768Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:47.040662Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:47.092920Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****h5YA (692F1515) () has now valid token of ldapuser@ldap 2026-01-10T12:08:47.164713Z node 6 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:51.144331Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7593703728163882203:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:51.144440Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:51.153959Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****h5YA (692F1515) 2026-01-10T12:08:51.154075Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:15761, port: 15761 2026-01-10T12:08:51.154161Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:51.233890Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:51.282972Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****h5YA (692F1515) () has now valid token of ldapuser@ldap 2026-01-10T12:08:54.165389Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****h5YA (692F1515) >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> TSchemeShardMoveTest::ResetCachedPath [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted |85.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-10T12:08:15.273506Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703595257641315:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.273580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.652223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.743005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.743103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.774276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:15.869343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.875662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703595257641295:2081] 1768046895272792 != 1768046895272795 2026-01-10T12:08:15.960356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:15.961039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:15.961048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:15.961055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:15.961160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:16.104263Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:16.112706Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:16.112738Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:16.113616Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:9266, port: 9266 2026-01-10T12:08:16.114281Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:16.138784Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:16.181171Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****9hhg (2423CD4A) () has now valid token of ldapuser@ldap 2026-01-10T12:08:19.267798Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703610989843951:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.268427Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:19.344366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.491153Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.491247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.539365Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.548315Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703610989843722:2081] 1768046899248365 != 1768046899248368 2026-01-10T12:08:19.557835Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.575669Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.818335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.818357Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.818370Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.818455Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:20.008417Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:20.013688Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:20.013724Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:20.014464Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:28971, port: 28971 2026-01-10T12:08:20.014586Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:20.041029Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:28971. Invalid credentials 2026-01-10T12:08:20.041304Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ieYQ (FE29E242) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:28971. 
Invalid credentials)' 2026-01-10T12:08:24.082503Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703632533230642:2226];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:24.082682Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:24.276161Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.279204Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:24.283880Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703628238263126:2081] 1768046903991887 != 1768046903991890 2026-01-10T12:08:24.305470Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.305547Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.348503Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.516159Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.536693Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.536717Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.536725Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.536819Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.644260Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.651525Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:24.651555Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.652288Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:1268, port: 1268 2026-01-10T12:08:24.652360Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:24.663535Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:1268. Invalid credentials 2026-01-10T12:08:24.663782Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****oEWg (F8364DF4) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:1268. 
Invalid credentials)' 2026-01-10T12:08:29.557552Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593703655166185126:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:29.557717Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:29.701723Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:29.789064Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.789152Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.789625Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.792618Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703655166184997:2081] 1768046909538293 != 1768046909538296 2026-01-10T12:08:29.830859Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.908575Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme ... 52Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.250037Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:35.289198Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:35.289292Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:35.301511Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593703677519457344:2081] 1768046914885970 != 1768046914885973 2026-01-10T12:08:35.304486Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:35.396788Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:35.693818Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:35.693845Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:35.693856Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:35.693949Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:35.770598Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:35.918783Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:36.004247Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:36.054544Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:36.054581Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:36.055381Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:7916, port: 7916 2026-01-10T12:08:36.055477Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:36.092291Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:36.144354Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:36.145578Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:36.145651Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:36.192665Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:36.240493Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:36.241470Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****cwAg (83918254) () has now valid token of ldapuser@ldap 2026-01-10T12:08:39.900283Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593703677519457483:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:39.900393Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:40.905417Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****cwAg (83918254) 2026-01-10T12:08:40.905527Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:7916, port: 7916 2026-01-10T12:08:40.905611Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 
2026-01-10T12:08:40.912657Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:40.956480Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:40.956896Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:40.956931Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:41.004436Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:41.053177Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:41.054191Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****cwAg (83918254) () has now valid token of ldapuser@ldap 2026-01-10T12:08:44.912658Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****cwAg (83918254) 2026-01-10T12:08:47.476628Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703731594642955:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:47.476785Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:47.500203Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.009440s 2026-01-10T12:08:47.534117Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:47.679309Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:47.691853Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:47.692668Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:47.735145Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:47.738399Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:47.948803Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:47.948826Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:08:47.948833Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:47.948911Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:48.140171Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:48.147359Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:48.151648Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:48.151674Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:48.152268Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26726, port: 26726 2026-01-10T12:08:48.152327Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:48.162934Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:48.208710Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****forw (FF054AF4) () has now valid token of ldapuser@ldap 2026-01-10T12:08:48.499820Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:51.510356Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****forw (FF054AF4) 2026-01-10T12:08:51.510474Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26726, port: 26726 2026-01-10T12:08:51.510572Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:51.524026Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:51.573100Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****forw (FF054AF4) () has now valid token of ldapuser@ldap 2026-01-10T12:08:52.478394Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7593703731594642955:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:52.478492Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:54.516485Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****forw (FF054AF4) >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] 
recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:57.160743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:57.160837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.160881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:57.160925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:57.160987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:57.161041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:57.161108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.161190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:57.162156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:57.162479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:57.249556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:57.249628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:57.262970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:57.267279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:57.267489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:57.279517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:57.279769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:57.280638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:57.281141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:57.285262Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:57.285476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:57.286793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:57.286878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:57.287127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:57.287190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:57.287244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:57.287525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.465925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } 
TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.467939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:57.468522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 44 2026-01-10T12:08:59.533767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-10T12:08:59.533949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:59.538956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-10T12:08:59.539148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000040 2026-01-10T12:08:59.539760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.539913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:59.539984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000040, at schemeshard: 72057594046678944 2026-01-10T12:08:59.540338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 129 2026-01-10T12:08:59.540518Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-01-10T12:08:59.548988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:59.549066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:08:59.549400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.549457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:08:59.549711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.549783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2026-01-10T12:08:59.551304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:08:59.551423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:08:59.551473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:08:59.551540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 8 2026-01-10T12:08:59.551590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:08:59.551703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-10T12:08:59.554102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-10T12:08:59.554162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-10T12:08:59.554379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply 
execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-10T12:08:59.554542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } CommitVersion { Step: 5000040 TxId: 105 } debug: NTableState::TProposedWaitParts operationId# 105:0 2026-01-10T12:08:59.556417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-10T12:08:59.556496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-10T12:08:59.556658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-10T12:08:59.556741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:08:59.556911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-10T12:08:59.556999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.557059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.557113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:08:59.557158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 129 -> 240 2026-01-10T12:08:59.558190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:08:59.560523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.560684Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.561054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.561135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:08:59.561276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:08:59.561341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:08:59.561388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:08:59.561426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:08:59.561464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2026-01-10T12:08:59.561530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2723] message: TxId: 105 2026-01-10T12:08:59.561579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:08:59.561625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:08:59.561689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:08:59.561841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:08:59.563958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:08:59.564008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1251:3171] TestWaitNotification: OK eventTxId 105 >> KqpWorkloadService::TestZeroQueueSize >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |85.4%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |85.4%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex |85.5%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] >> Initializer::Simple |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:57.937082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:57.937222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.937277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:57.937330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:57.937407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:57.937452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:57.937571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.937668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:57.938781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:57.939187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:58.181418Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:58.181491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:58.199802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:58.199930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:58.200149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:58.253787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:58.254130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:58.255030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.255318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:58.260977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.261198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:58.262460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.262555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.262663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:58.262718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:58.262774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:58.263117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.275802Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:08:58.446570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:58.446846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.447072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:58.447122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:58.447386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:58.447480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:58.451349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.451585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:58.451815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.451892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:58.451935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:58.451987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:58.454995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.455064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:58.455119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:58.457181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.457236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.457314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.457377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:58.461067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:58.463462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:58.463661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:58.464510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.464634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:58.464671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.464890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:58.464930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.465066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:58.465138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:58.468471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.468528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:01.166555Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:01.166797Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 397us result status StatusSuccess 2026-01-10T12:09:01.167572Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:01.168243Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:01.168478Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 249us result status StatusSuccess 2026-01-10T12:09:01.169174Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardMoveTest::Index [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:57.090001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:57.090083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.090122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:57.090169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:57.090208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:57.090260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:57.090356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:57.090433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:57.091368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:57.091666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:57.197166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:57.197239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:57.221988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:57.222147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:57.222346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:57.232104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:57.232375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:57.233212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:57.233476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:57.236105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:57.236308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:57.237577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:57.237646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:57.237756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-10T12:08:57.237813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:57.237865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:57.238148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.249108Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:08:57.388997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:57.389289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.389537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:57.389592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:57.389846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:57.389919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:57.395000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:57.395199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:57.395376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.395447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:57.395484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:57.395512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-10T12:08:57.401564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.401643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:57.401685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:57.404725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.404789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:57.404854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:57.404904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:57.418713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:57.424984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:57.425281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:57.428458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:57.428687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:57.428771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:57.429176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:57.429268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:57.429499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:57.429594Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:57.433386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:57.433461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ecute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:09:01.928731Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:09:01.928842Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:09:01.928895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:09:01.928952Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-10T12:09:01.933213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:09:01.933354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:09:01.933407Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:09:01.933457Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2026-01-10T12:09:01.933514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:01.934526Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:09:01.934614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:09:01.934648Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:09:01.934682Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 
2026-01-10T12:09:01.934715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-10T12:09:01.934791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-10T12:09:01.937880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:09:01.937952Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:01.938244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2026-01-10T12:09:01.938387Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:09:01.938433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:09:01.938469Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:09:01.938507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:09:01.938550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-10T12:09:01.938609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2319] message: TxId: 108 2026-01-10T12:09:01.938661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:09:01.938701Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-10T12:09:01.938737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-10T12:09:01.938835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2026-01-10T12:09:01.940222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-10T12:09:01.940551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-10T12:09:01.941846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:09:01.941901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:832:2788] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-10T12:09:01.942659Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-10T12:09:01.942734Z node 
2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2026-01-10T12:09:01.965331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 8589936891 } TabletId: 72075186233409546 State: 4 2026-01-10T12:09:01.965438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2026-01-10T12:09:01.967268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:09:01.967393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:09:01.967834Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-10T12:09:01.970423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:01.970713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2026-01-10T12:09:01.971860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:01.971929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2026-01-10T12:09:01.972011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:09:01.981421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:09:01.981521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:09:01.981806Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2026-01-10T12:09:01.982523Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:01.982758Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 262us result status 
StatusSuccess 2026-01-10T12:09:01.983221Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-10T12:08:14.930322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703590541324726:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:14.931629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.192166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.213127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.213240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.305621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:15.332053Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.345342Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703590541324687:2081] 1768046894925629 != 1768046894925632 2026-01-10T12:08:15.412068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:15.487828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:15.487860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:15.487867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:15.487955Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:15.720306Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:15.723191Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:15.723212Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:15.724715Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:11047, port: 11047 2026-01-10T12:08:15.724830Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:15.784504Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:15.836312Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:15.836847Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:15.836892Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:15.888414Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:15.932483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:15.938728Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****7zGw (0AFA2085) () has now valid token of ldapuser@ldap 2026-01-10T12:08:15.960223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:18.944837Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****7zGw (0AFA2085) 2026-01-10T12:08:18.945090Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:11047, port: 11047 2026-01-10T12:08:18.945168Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:19.088752Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:19.092328Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:11047 return no entries 2026-01-10T12:08:19.092945Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****7zGw (0AFA2085) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:11047 return no entries)' 2026-01-10T12:08:19.933560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703590541324726:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.933645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:21.956367Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****7zGw (0AFA2085) 2026-01-10T12:08:26.895982Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703640828271074:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:26.896053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:27.025309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:27.174242Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:27.207050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:27.207200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:27.212251Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:27.292211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:27.488654Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:27.488679Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:08:27.488686Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:27.488784Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:27.803869Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:27.805848Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:27.805883Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:27.806723Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14738, port: 14738 2026-01-10T12:08:27.806802Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:27.873334Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:27.874040Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:14738. Server is busy 2026-01-10T12:08:27.884536Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****lkxg (A6EE9C06) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:14738. Server is busy)' 2026-01-10T12:08:27.884850Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:27.884879Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:27.885827Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14738, port: 14738 2026-01-10T12:08:27.885881Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:27.957424Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:27.958091Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:14738. Server is busy 2026-01-10T12:08:27.958787Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:27.958881Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****lkxg (A6EE9C06) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:14738. 
Server is busy)' 2026-01-10T12:08:29.964217Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****lkxg (A6EE9C06) 2026-01-10T12:08:29.964529Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:29.964548Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:29.965674Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14738, port: 14738 2026-01-10T12:08:29.965735Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind ... ableExistsActor;event=undelivered;self_id=[4:7593703720000572434:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:44.454110Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:44.600214Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593703720000572391:2081] 1768046924429605 != 1768046924429608 2026-01-10T12:08:44.608263Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:44.663646Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:44.669338Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:44.669427Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:44.687006Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:44.836651Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:44.836684Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:44.836699Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:44.836777Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:44.888347Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:45.041941Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:45.047054Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:45.047093Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:45.047920Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:20583, port: 20583 2026-01-10T12:08:45.048009Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:45.074433Z node 4 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:45.124439Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:45.173230Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****xLjQ (34023F18) () has now valid token of ldapuser@ldap 2026-01-10T12:08:49.165027Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593703740294740137:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:49.165113Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:49.187723Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:49.292148Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:49.303733Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:49.303827Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:49.323268Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:49.325043Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:49.433187Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:49.433210Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:49.433217Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:49.433287Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:49.512188Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:49.539243Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:49.541952Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:49.541988Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:49.542764Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:63156, port: 63156 2026-01-10T12:08:49.542834Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:49.579530Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 
2026-01-10T12:08:49.638841Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-10T12:08:49.680625Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:49.681375Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:49.681433Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:49.728779Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:49.776723Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-10T12:08:49.778128Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****EcYw (81C1E148) () has now valid token of ldapuser@ldap 2026-01-10T12:08:55.740229Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:55.744245Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:55.885799Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:55.888166Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:55.890536Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:55.896232Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703763631292547:2081] 1768046935488918 != 1768046935488921 2026-01-10T12:08:55.924276Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:55.934014Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:56.444305Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:56.568240Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:56.606747Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:56.606772Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:56.606781Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:56.606889Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:56.844280Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:56.846395Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:56.846426Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:56.847281Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10065, port: 10065 2026-01-10T12:08:56.847509Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:56.877027Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:56.921152Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-10T12:08:56.921255Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:10065. Bad search filter 2026-01-10T12:08:56.921849Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****wAqw (EF9FEBBD) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:10065. 
Bad search filter)' |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:58.224244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:58.224315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.224360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:58.224394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:58.224450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:58.224483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:58.224554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.224608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:58.225319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:58.225545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:58.333689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:58.333750Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:58.350961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:58.356391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:58.356602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:58.364213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:58.364433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:58.365178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.365599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:58.369220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.369412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:58.370691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.370784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.370943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:58.370995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:58.371077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:58.371391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.537739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539475Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.539934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.540034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.540134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.540242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.540308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 5186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:09:02.337864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:02.337909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:02.337963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:09:02.338022Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-10T12:09:02.339776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 8589937308 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:09:02.339838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2026-01-10T12:09:02.339999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 8589937308 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:09:02.340084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at 
tablet: 72057594046678944 2026-01-10T12:09:02.340168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 733 RawX2: 8589937308 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:09:02.340230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:02.340266Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-10T12:09:02.340305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:09:02.340356Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:2 129 -> 240 2026-01-10T12:09:02.343523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-10T12:09:02.346951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:02.347538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:02.348246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:02.348345Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:02.348421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:09:02.348588Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2026-01-10T12:09:02.348636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-10T12:09:02.348688Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2026-01-10T12:09:02.348737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-10T12:09:02.348790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2026-01-10T12:09:02.354179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-10T12:09:02.354655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-10T12:09:02.354715Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:02.354761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:09:02.354894Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2026-01-10T12:09:02.354932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-10T12:09:02.354976Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2026-01-10T12:09:02.355010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-10T12:09:02.355048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2026-01-10T12:09:02.355105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-10T12:09:02.355188Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:09:02.355252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:09:02.355405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-10T12:09:02.355452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:02.355512Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-10T12:09:02.355541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-10T12:09:02.355576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-10T12:09:02.355603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:02.355627Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-10T12:09:02.355650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-10T12:09:02.355717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:09:02.355759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 
72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:09:02.356372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:02.356436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:09:02.356524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:09:02.356575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:09:02.356615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:02.356646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:09:02.356682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:02.375618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:09:02.375993Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [2:679:2669] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2026-01-10T12:09:02.488211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:09:02.488280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:09:02.488832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:09:02.488925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:02.488958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:1082:2965] TestWaitNotification: OK eventTxId 103 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:58.866898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2026-01-10T12:08:58.866986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.867024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:58.867070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:58.867134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:58.867168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:58.867231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.867298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:58.868289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:58.868565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:58.988877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:58.988958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:59.030132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:59.040854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:59.041123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:59.060698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:59.060948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:59.061748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.062252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:59.084848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.085061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:59.086405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:59.086476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.086592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:59.086637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:59.086681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:59.087024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.101764Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:08:59.336733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:59.337018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.337239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:59.337289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:59.337553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:59.337623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:59.344984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.345235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:59.345438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.345513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:59.345558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:59.345600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:59.353648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.353727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:59.353789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:59.360927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.360998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.361066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.361135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:59.365592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:59.372787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:59.373071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:59.374345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.374500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:59.374552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.374865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:59.374946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.375142Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:59.375271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:59.381018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:59.381072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 57594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:02.869008Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:02.869330Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 339us result status StatusSuccess 2026-01-10T12:09:02.870271Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 
50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } 
PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:02.871125Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:02.871376Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 276us result status StatusSuccess 2026-01-10T12:09:02.872234Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 
16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:59.120759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:59.120853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:59.120916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:59.120965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:59.121019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:59.121054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:59.121176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:59.121252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:59.122181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:59.122516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:59.301456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:59.301526Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:59.322654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:59.328558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:59.328798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:59.340753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:59.341044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:59.341851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.342333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:59.347453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.347667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:59.348990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:59.349059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.349183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:59.349236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:59.349293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-10T12:08:59.349570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.359222Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:08:59.543578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:59.543875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.547316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:59.547403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:59.547700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:59.547791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:59.550594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.550845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:59.551068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.551144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:59.551186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:59.551232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:59.556002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.556092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:59.556159Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:59.565408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.565474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.565543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.565608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:59.594910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:59.597137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:59.597374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:59.598461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.598612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:59.598658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.598959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:59.599019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:59.599208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:59.599313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:59.601363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-10T12:08:59.601418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ep: 5000042 2026-01-10T12:09:03.283439Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:03.283534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:03.283591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-10T12:09:03.283649Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-10T12:09:03.285925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-10T12:09:03.285985Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-10T12:09:03.286084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:09:03.286117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:09:03.286153Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:09:03.286215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:09:03.286256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:09:03.286311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:128:2153] message: TxId: 281474976710760 2026-01-10T12:09:03.286372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:09:03.286408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:09:03.286437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:09:03.286505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:09:03.288372Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:09:03.288441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:09:03.288508Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-10T12:09:03.288623Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:864:2823], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:09:03.290443Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-10T12:09:03.290580Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:864:2823], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:09:03.290640Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:09:03.292287Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-10T12:09:03.292431Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: 
value0, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:864:2823], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:09:03.292485Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-10T12:09:03.292618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:03.292662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1048:2995] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:03.293312Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:03.293603Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 328us result status StatusSuccess 2026-01-10T12:09:03.294228Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 41 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:58.516007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:58.516126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.516167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:58.516224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:58.516266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-10T12:08:58.516294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:58.516401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.516479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:58.517396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:58.517716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:58.619200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:58.619270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:58.653371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:58.653497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:58.653701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:58.691234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:58.691535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:58.692439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.692744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:58.698964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.699176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:58.700602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.700696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.700795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:58.700863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:58.700921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:58.701261Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.880022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.881929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.882033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.882137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:58.882223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... EvNotifyTxCompletionResult to actorId: [2:789:2756] message: TxId: 102 2026-01-10T12:09:03.716618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-10T12:09:03.716662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:09:03.716695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:09:03.716834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-10T12:09:03.716876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:03.716911Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-10T12:09:03.716936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-10T12:09:03.716979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:09:03.717006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:09:03.717361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:03.717415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:09:03.717496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:03.717541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:09:03.717580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:09:03.744867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:03.744943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:887:2840] 2026-01-10T12:09:03.745162Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:03.749015Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:03.749228Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 230us result status StatusPathDoesNotExist 2026-01-10T12:09:03.749367Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:09:03.749743Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:03.749901Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 171us result status StatusPathDoesNotExist 2026-01-10T12:09:03.750019Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:09:03.750362Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:03.750595Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 226us result status StatusSuccess 2026-01-10T12:09:03.751066Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: 
"" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:03.751622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:03.751814Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 198us result status StatusSuccess 2026-01-10T12:09:03.752142Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.6%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |85.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |85.6%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:59.352441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:59.352542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:59.352584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:59.352641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:59.352691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:59.352722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:59.352800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:59.352880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:59.353819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:59.354135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:59.476756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:59.476824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:59.494968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:59.495087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:59.495290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:59.504279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:59.504569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:59.505443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:59.505719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:59.508245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.508438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:59.509717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:59.509786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:59.509863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:59.509915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:59.509966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:59.510257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:59.698771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.699935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700285Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.700996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:59.701079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:09:05.475199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 731 RawX2: 8589937306 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:09:05.475281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:05.475329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-10T12:09:05.475376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:09:05.475419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:2 129 -> 240 2026-01-10T12:09:05.476391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:09:05.476433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:09:05.476535Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:09:05.476578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:09:05.476650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 733 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:09:05.476706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:05.476740Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:09:05.476773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:09:05.476809Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:09:05.479607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-10T12:09:05.479872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:09:05.481253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-10T12:09:05.481565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:09:05.481781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-10T12:09:05.481839Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:05.481886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:09:05.482015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-01-10T12:09:05.482054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-10T12:09:05.482093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#102:2 progress is 2/3 2026-01-10T12:09:05.482128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-10T12:09:05.482172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2026-01-10T12:09:05.482423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:09:05.482465Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:05.482501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:09:05.482571Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-10T12:09:05.482596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-10T12:09:05.482626Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-10T12:09:05.482650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-10T12:09:05.482678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2026-01-10T12:09:05.482743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:789:2756] message: TxId: 102 2026-01-10T12:09:05.482794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-10T12:09:05.482841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:09:05.482902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:09:05.483031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-10T12:09:05.483070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:05.483106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-10T12:09:05.483129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-10T12:09:05.483160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-10T12:09:05.483185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 
2026-01-10T12:09:05.483207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-10T12:09:05.483229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-10T12:09:05.483274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:09:05.483300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:09:05.483697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:05.483756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:09:05.483837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:09:05.483883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:09:05.483921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:05.483952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:09:05.483988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:05.490196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:05.490274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:891:2850] 2026-01-10T12:09:05.490420Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> TS3WrapperTests::AbortMultipartUpload |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupRestoreWithIndex |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] |85.7%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::HeadObject [GOOD] |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::CopyPartUpload [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2026-01-10T12:09:06.897045Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1298A9C1-3F89-4338-A406-7BFF1C765047, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:28480 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9648F040-8BB8-4DFC-B24E-BE38F3FBE7EB amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-10T12:09:06.903176Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1298A9C1-3F89-4338-A406-7BFF1C765047, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-10T12:09:06.908476Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# AB8F0126-6579-4DAB-A852-4C20DAD62DB5, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:28480 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0F9921D-A1CF-46E2-B483-C2201C6D29C3 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2026-01-10T12:09:06.911761Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# AB8F0126-6579-4DAB-A852-4C20DAD62DB5, response# AbortMultipartUploadResult { } 2026-01-10T12:09:06.912150Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 02157D31-7538-4883-8241-2271CD081287, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:28480 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D1867E65-B747-4728-929D-4346D7895368 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-10T12:09:06.915210Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 02157D31-7538-4883-8241-2271CD081287, response# No response body. 
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2026-01-10T12:09:07.104499Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# F7C1D321-9DB3-4E6A-B2EC-1B9E96E2680D, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:21918 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0020618A-E9AE-4AF6-93B7-8D68B4762778 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2026-01-10T12:09:07.110167Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# F7C1D321-9DB3-4E6A-B2EC-1B9E96E2680D, response# |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::GetUnknownObject [GOOD] |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2026-01-10T12:09:07.345691Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 14A20DAC-DD45-4F84-A7E4-0F5BC0948A5E, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:18134 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CE2CE3F3-67ED-4528-AEC7-2DE95D12CCFF amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-10T12:09:07.350575Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 14A20DAC-DD45-4F84-A7E4-0F5BC0948A5E, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-10T12:09:07.350946Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# AA79507F-A5CF-4F71-8D37-0B55C0FDCB29, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:18134 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 17DAD091-016E-4C7E-9115-A81B1ED9773D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-10T12:09:07.353612Z node 1 :S3_WRAPPER 
NOTICE: s3_storage.h:63: Response: uuid# AA79507F-A5CF-4F71-8D37-0B55C0FDCB29, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2026-01-10T12:09:07.574801Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 339101A6-2542-4A03-B5D0-3BAC52336393, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:62520 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A2F063F3-F18A-4161-93C2-9846646719A1 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-10T12:09:07.581246Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 339101A6-2542-4A03-B5D0-3BAC52336393, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-10T12:09:07.584394Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 842D33DF-F4E6-4E08-9EFF-082ADB839415, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:62520 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AA221590-DB64-44F9-93A7-71C4D4D9222C amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2026-01-10T12:09:07.588446Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 842D33DF-F4E6-4E08-9EFF-082ADB839415, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2026-01-10T12:09:07.588844Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 0BCC3E90-FD75-4EB8-A185-565108048731, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62520 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E69804DB-7074-4F4D-BDBE-07E2E7AA2C97 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2026-01-10T12:09:07.591875Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 0BCC3E90-FD75-4EB8-A185-565108048731, response# UploadPartCopyResult { } 2026-01-10T12:09:07.592381Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 0FED8606-9A38-410B-9C5F-F1AE6B797414, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62520 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
2682E6DB-82A2-4E8D-856C-CD94D97B5E6F amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2026-01-10T12:09:07.596845Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 0FED8606-9A38-410B-9C5F-F1AE6B797414, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2026-01-10T12:09:07.597302Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 85ABFDA8-7AC9-488F-95D8-6D33D47EBEAE, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:62520 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3D917AE1-B68E-4B13-B4FA-8E9F65CA53E3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2026-01-10T12:09:07.601177Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 85ABFDA8-7AC9-488F-95D8-6D33D47EBEAE, response# GetObjectResult { } >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless |85.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2026-01-10T12:09:07.734021Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 79C15C6D-DC8C-401F-826C-BE183B71A9A6, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:9295 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 773E5A62-78FB-4951-B089-657B79E8D9C4 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-10T12:09:07.739189Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 79C15C6D-DC8C-401F-826C-BE183B71A9A6, response# No response body. 
|85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2026-01-10T12:09:07.711988Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# ABA24CBC-6D91-4574-BE76-CEBD0104BF69, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:18114 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A39AD350-1EA8-44FF-AD63-ABDB6DE08BC6 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2026-01-10T12:09:07.722310Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# ABA24CBC-6D91-4574-BE76-CEBD0104BF69, response# |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload >> TS3WrapperTests::MultipartUpload [GOOD] >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::AbortUnknownUpload [GOOD] >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::PutObject >> TTabletPipeTest::TestPipeConnectToHint ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2026-01-10T12:09:08.739015Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# FEE60C87-4ED7-4B95-B4C0-B1A9B08BDE33, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:2836 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B180F71D-D09A-4415-99E2-3A2F81DBB6C4 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-10T12:09:08.745568Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# FEE60C87-4ED7-4B95-B4C0-B1A9B08BDE33, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-10T12:09:08.746207Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 2107CE27-F997-4C7E-B90E-E6E6910FD68F, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2836 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5B16B80A-0292-4EBC-A61C-70863F32F55A amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2026-01-10T12:09:08.749972Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 2107CE27-F997-4C7E-B90E-E6E6910FD68F, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-10T12:09:08.750404Z node 
1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 77F36B3D-F21A-410C-B016-DA6047CF4B1C, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2836 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1AF83BE5-A234-45B4-9084-C79387EFEB64 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2026-01-10T12:09:08.754593Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 77F36B3D-F21A-410C-B016-DA6047CF4B1C, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-10T12:09:08.755038Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 984C952A-6CB0-4675-BA0A-45E02119595D, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:2836 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DB10E449-384F-4A5D-BA4B-BAC4E88AFDD9 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-10T12:09:08.761245Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 984C952A-6CB0-4675-BA0A-45E02119595D, response# GetObjectResult { } |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> TS3WrapperTests::PutObject [GOOD] >> BasicStatistics::SimpleGlobalIndex [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2026-01-10T12:09:09.222049Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 66D56857-5491-4438-82D2-8340399C8048, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:7368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AFD9F5DF-20BF-4A50-A955-0DE0B18C0287 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-10T12:09:09.229903Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 66D56857-5491-4438-82D2-8340399C8048, response# No response body. 
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2026-01-10T12:09:08.994263Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# DE20BCC9-F00B-4E18-B20F-1424C005707A, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:17644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3E6C48CD-FD20-4DC6-BCDB-574A53AECE6E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2026-01-10T12:09:09.000329Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# DE20BCC9-F00B-4E18-B20F-1424C005707A, response# |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> TResourceBroker::TestOverusage |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2026-01-10T12:09:09.657934Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 8D451AC1-96B5-409A-A5B9-69EA5F6B3C69, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:27072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0DDBB44E-9D7A-42DF-A36B-21D6466C7894 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-10T12:09:09.665138Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 8D451AC1-96B5-409A-A5B9-69EA5F6B3C69, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } |85.7%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TTabletPipeTest::TestInterconnectSession |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> BasicUsage::BasicWriteSession |85.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/tablet/ut/unittest >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> TResourceBroker::TestNotifyActorDied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:58.548448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:58.548551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.548593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:58.548640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:58.548698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:58.548738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:58.548827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:58.548904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:58.549822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:58.550164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:58.650620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:58.650685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:58.670263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:58.670408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:58.670595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:58.679514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:58.679778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:58.680621Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.680879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:58.683389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.683607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:58.684875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.684954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:58.685039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:58.685093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:58.685142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:58.685474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.693120Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:08:58.854718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:08:58.855030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.855266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:08:58.855324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:08:58.855580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:08:58.855682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:58.858224Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:58.858437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:08:58.858669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.858750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:08:58.858811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:58.858870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:58.861135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.861203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:08:58.861267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:58.863235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.863289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:08:58.863341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.863388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:58.867375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:58.869865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:58.870088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:08:58.871414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-01-10T12:08:58.871575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:08:58.871634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.871971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:08:58.872043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:08:58.872247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:08:58.872357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:08:58.874840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:58.874894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 26-01-10T12:09:09.526700Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 65], version: 18446744073709551615 2026-01-10T12:09:09.526735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 4 2026-01-10T12:09:09.527143Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:09:09.527228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:09:09.527270Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:09:09.527299Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 66], version: 18446744073709551615 2026-01-10T12:09:09.527328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 3 2026-01-10T12:09:09.527677Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:09:09.527775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:09:09.527804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:09:09.527828Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 67], version: 18446744073709551615 2026-01-10T12:09:09.527854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 5 2026-01-10T12:09:09.527918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 4/6, is published: true 2026-01-10T12:09:09.529170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:4, at schemeshard: 72057594046678944 2026-01-10T12:09:09.529226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:4 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:09.529461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 4 2026-01-10T12:09:09.529602Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-10T12:09:09.529635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-10T12:09:09.529687Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-10T12:09:09.529716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-10T12:09:09.529767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 5/6, is published: true 2026-01-10T12:09:09.530739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.530812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.530870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.531597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-10T12:09:09.531645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:2 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:09.531869Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 3 2026-01-10T12:09:09.531963Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-10T12:09:09.531990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-10T12:09:09.532024Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-10T12:09:09.532047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-10T12:09:09.532147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 6/6, is published: true 2026-01-10T12:09:09.532223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:744:2721] message: TxId: 107 2026-01-10T12:09:09.532277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-10T12:09:09.532325Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:09:09.532367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-10T12:09:09.532488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:09:09.532537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-10T12:09:09.532570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-10T12:09:09.532607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 64] was 4 2026-01-10T12:09:09.532629Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-10T12:09:09.532649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-10T12:09:09.532711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 2 2026-01-10T12:09:09.532755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:3 2026-01-10T12:09:09.532780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:3 2026-01-10T12:09:09.532835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 2 2026-01-10T12:09:09.532880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:4 2026-01-10T12:09:09.532902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:4 2026-01-10T12:09:09.532944Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 3 2026-01-10T12:09:09.532981Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:5 2026-01-10T12:09:09.533003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:5 2026-01-10T12:09:09.533097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 68] was 1 2026-01-10T12:09:09.533718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:09.533772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 68], at schemeshard: 72057594046678944 2026-01-10T12:09:09.533863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 2 2026-01-10T12:09:09.535882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.536010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.536050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.539127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.539236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.539295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:09:09.540893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:09:09.540950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:4176:5926] 2026-01-10T12:09:09.541702Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:20.792058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:20.913546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:20.920661Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:20.921025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:20.921087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:21.362918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.524970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:21.525123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:21.561773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:21.614549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:22.548664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:22.549193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:22.549253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:22.549294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:22.549872Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:22.629012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.353774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:28.175826Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:28.200990Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:28.207245Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:28.290639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:28.290764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:28.330221Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:28.333364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:28.638216Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:28.638349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:28.639854Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.640749Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.641384Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.642270Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.642372Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.642640Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.642762Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.642876Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.643000Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.669511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.054630Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.099509Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:29.099628Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:29.153655Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:29.155419Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:29.155673Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:29.155746Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:29.155803Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:29.156917Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:29.157008Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:29.157069Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:29.157891Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:29.160345Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1910:2454] 2026-01-10T12:08:29.166337Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:08:29.174724Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Describe result: PathErrorUnknown 2026-01-10T12:08:29.174806Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Creating table 2026-01-10T12:08:29.174929Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:08:29.182045Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:29.182186Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2148:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:29.225515Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2221:2651] 2026-01-10T12:08:29.225941Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2221:2651], schemeshard id = 72075186224037897 2026-01-10T12:08:29.226921Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2222:2652], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:29.270267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:29.340344Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:29.340539Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:29.364665Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:29.648156Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:29.835639Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:08:30.191405Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:30.323095Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:30.323215Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2124:2587] Owner: [2:2123:2586]. Column diff is empty, finishing 2026-01-10T12:08:31.157105Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:08:52.372935Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3821:3385]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:52.373278Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:08:52.373325Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3821:3385], StatRequests.size() = 1 2026-01-10T12:08:53.329267Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3858:3401]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:53.329625Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:08:53.329676Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3858:3401], StatRequests.size() = 1 2026-01-10T12:08:54.253291Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3887:3413]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:54.253619Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:08:54.253665Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3887:3413], StatRequests.size() = 1 2026-01-10T12:08:54.769019Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:08:55.261217Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3920:3427]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:55.261619Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:08:55.261672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3920:3427], StatRequests.size() = 1 2026-01-10T12:08:56.133624Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3947:3437]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-10T12:08:56.133955Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:08:56.134009Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3947:3437], StatRequests.size() = 1 2026-01-10T12:08:56.572883Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:08:56.573066Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:08:56.573588Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:08:56.573874Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:08:57.011602Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3974:3447]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:57.011921Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:08:57.011974Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3974:3447], StatRequests.size() = 1 2026-01-10T12:08:57.933449Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4005:3459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:57.933801Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:08:57.933855Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4005:3459], StatRequests.size() = 1 2026-01-10T12:08:58.869182Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4036:3469]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:58.869492Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:08:58.869541Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4036:3469], StatRequests.size() = 1 2026-01-10T12:08:59.484728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:08:59.581211Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:08:59.581321Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:08:59.581376Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:08:59.581437Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:09:00.221878Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4072:3484]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:00.222345Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:09:00.222411Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, 
ReplyToActorId = [2:4072:3484], StatRequests.size() = 1 2026-01-10T12:09:01.533024Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4105:3496]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:01.533321Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:09:01.533363Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4105:3496], StatRequests.size() = 1 2026-01-10T12:09:02.297010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:02.297717Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-10T12:09:02.298086Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:09:02.298167Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:09:02.324195Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:02.324296Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:02.324568Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-10T12:09:02.345618Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:03.024245Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4136:3510]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:03.024649Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:09:03.024705Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4136:3510], StatRequests.size() = 1 2026-01-10T12:09:04.257311Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4165:3522]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:04.257652Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:09:04.257708Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4165:3522], StatRequests.size() = 1 2026-01-10T12:09:05.344910Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4194:3532]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:05.345239Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:09:05.345279Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4194:3532], StatRequests.size() = 1 2026-01-10T12:09:05.984224Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:06.531895Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4227:3544]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:06.532297Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:09:06.532350Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4227:3544], StatRequests.size() = 1 2026-01-10T12:09:07.571963Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4256:3556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:07.572333Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-10T12:09:07.572386Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4256:3556], StatRequests.size() = 1 2026-01-10T12:09:08.123569Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:08.124147Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:09:08.124638Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:09:08.124728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:09:08.146484Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:08.146549Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:08.146812Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-10T12:09:08.161225Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:08.701339Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4289:3570]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:08.701645Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-10T12:09:08.701692Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4289:3570], StatRequests.size() = 1 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... waiting for client destroyed notification ... 
waiting for connect2 |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] Test command err: 2026-01-10T12:08:31.609912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703662431162120:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:31.610136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:32.042478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:32.059481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:32.059596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:32.321230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:32.336187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703662431161990:2081] 1768046911582920 != 1768046911582923 2026-01-10T12:08:32.341940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:32.358959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:32.624460Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:32.673032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:32.673069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:32.673080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:32.673185Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:33.183813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:36.610228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703662431162120:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:36.610292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2026-01-10T12:08:38.272714Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:38.279662Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI= temp_dir_name# 424ec041-411d-b871-f74b-84affe790a9f trace_id# 2026-01-10T12:08:38.280299Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703692495933943:2323], Start check tables existence, number paths: 2 2026-01-10T12:08:38.280398Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:38.299439Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:38.299486Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:38.299657Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703692495933943:2323], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:38.299704Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703692495933943:2323], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:38.299728Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703692495933943:2323], Successfully finished 2026-01-10T12:08:38.299872Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:38.300619Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:38.304788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:38.308478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:38.314085Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:08:38.315913Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:38.326273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] 
[TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:08:38.384278Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:38.453864Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703692495934025:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:38.454050Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703692495933974:2511], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:38.456562Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703692495934032:2549], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:08:38.457976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703692495934032:2549], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:08:38.477167Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:08:38.477282Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:08:38.477327Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:08:38.477362Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:08:38.477509Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=ZWQzNGU1YmMtODAxZTExZDMtNWViNjhjNDMtN2VjN2MzOWI=, ActorId: [1:7593703692495933947:2327], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:08:39.581927Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703696268439927:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:39.583742Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:39.660952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:39.816262Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593703696268439872:2081] 1768046919571294 != 1768046919571297 2026-01-10T12:08:39.893397Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:39.903735Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:39.908309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:39.952291Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:39.957887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:40.286551Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:40.304575Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:40.304597Z node 2 :N ... ed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) 2026-01-10T12:09:09.376208Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7593703826994557043:2336], DatabaseId: /Root, PoolId: default, Got watch notification 2026-01-10T12:09:09.376345Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:476: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7593703826994557043:2336], DatabaseId: /Root, PoolId: default, Pool config has changed, queue size: -1, in flight limit: -1 2026-01-10T12:09:09.377008Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7593703805519719620:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:09.377074Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:09.379868Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:09:09.380015Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, TxInfo status# Committed kind# Pure total_duration# 12.115 server_duration# 12.044 queries_count# 2 trace_id# 2026-01-10T12:09:09.380072Z node 6 
:KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:09:09.380223Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:09.380273Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, EndCleanup is_final# true trace_id# 2026-01-10T12:09:09.380332Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: ExecuteState, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, Sent query response back to proxy proxy_request_id# 5 proxy_id# [6:7593703805519719618:2077] trace_id# 2026-01-10T12:09:09.380364Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: unknown state, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:09.380520Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=YjYwNTM4ODEtZjM4ZjNkYWItMmIzNzQ2YzYtOWM0NzE3N2E=, ActorId: [6:7593703826994557078:2343], ActorState: unknown state, LegacyTraceId: 01kekwywxxept7thxefhmgesgs, Session actor destroyed trace_id# 2026-01-10T12:09:09.383735Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ== temp_dir_name# b48adbed-4ea5-8627-1a52-ba87d0f98282 trace_id# 2026-01-10T12:09:09.383867Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:09:09.384191Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ReadyState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# DROP RESOURCE POOL default; rpc_actor# [6:7593703826994557102:2596] database# Root database_id# /Root pool_id# default trace_id# 2026-01-10T12:09:09.384225Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, 
ActorId: [6:7593703826994557103:2345], ActorState: ReadyState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Request placed into pool from cache pool_id# default trace_id# 2026-01-10T12:09:09.384301Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Sending CompileQuery request trace_id# 2026-01-10T12:09:09.424202Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7593703826994557043:2336], DatabaseId: /Root, PoolId: default, Got delete notification 2026-01-10T12:09:09.424301Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:09:09.424337Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:09:09.424369Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7593703826994557123:2348], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:09:09.424843Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7593703826994557123:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:09.424903Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:09.429366Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:09:09.429526Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, TxInfo status# Committed kind# Pure total_duration# 11.268 server_duration# 11.21 queries_count# 2 trace_id# 2026-01-10T12:09:09.429591Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:09:09.429702Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:09.429723Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, EndCleanup is_final# true trace_id# 2026-01-10T12:09:09.429756Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Sent query response back to proxy proxy_request_id# 6 proxy_id# [6:7593703805519719618:2077] trace_id# 2026-01-10T12:09:09.429779Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: unknown state, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:09.429906Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=NTliZjg2Ni0yMDFiYmY4OS00NjM2ZjA1MC0zMDM5MTJlYQ==, ActorId: [6:7593703826994557103:2345], ActorState: unknown state, LegacyTraceId: 01kekwyx0869mx1hp951h77hb5, Session actor destroyed trace_id# 2026-01-10T12:09:09.456456Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=6&id=ZjZhMWJhMmMtM2M1Y2M1LTJjNDAwMGI1LTdiZmU4ZWZi, ActorId: [6:7593703822699589643:2326], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:09:09.456513Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=ZjZhMWJhMmMtM2M1Y2M1LTJjNDAwMGI1LTdiZmU4ZWZi, ActorId: [6:7593703822699589643:2326], ActorState: 
ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:09.456544Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=ZjZhMWJhMmMtM2M1Y2M1LTJjNDAwMGI1LTdiZmU4ZWZi, ActorId: [6:7593703822699589643:2326], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:09:09.456575Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=ZjZhMWJhMmMtM2M1Y2M1LTJjNDAwMGI1LTdiZmU4ZWZi, ActorId: [6:7593703822699589643:2326], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:09.456664Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=ZjZhMWJhMmMtM2M1Y2M1LTJjNDAwMGI1LTdiZmU4ZWZi, ActorId: [6:7593703822699589643:2326], ActorState: unknown state, Session actor destroyed trace_id# |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2026-01-10T12:09:10.665838Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2026-01-10T12:09:10.682627Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2026-01-10T12:09:10.688927Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2026-01-10T12:09:10.692164Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:132:2157] 2026-01-10T12:09:10.692230Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:132:2157] 2026-01-10T12:09:10.692574Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:132:2157] 2026-01-10T12:09:10.692639Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:132:2157] 2026-01-10T12:09:10.692742Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:132:2157] 2026-01-10T12:09:10.692776Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:132:2157] 2026-01-10T12:09:10.692843Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:132:2157] 2026-01-10T12:09:10.693032Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:132:2157] Type# 269877249 Reason# ActorUnknown 2026-01-10T12:09:10.693196Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:135:2159] 2026-01-10T12:09:10.693232Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:135:2159] 2026-01-10T12:09:10.693284Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:135:2159] 2026-01-10T12:09:10.693318Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:135:2159] 2026-01-10T12:09:10.693372Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:135:2159] 2026-01-10T12:09:10.693410Z node 1 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:135:2159] 2026-01-10T12:09:10.693482Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:135:2159] 2026-01-10T12:09:10.693550Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:135:2159] Type# 269877249 Reason# ActorUnknown 2026-01-10T12:09:10.693650Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:137:2161] 2026-01-10T12:09:10.693669Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:137:2161] 2026-01-10T12:09:10.693714Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:137:2161] 2026-01-10T12:09:10.693739Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:137:2161] 2026-01-10T12:09:10.693777Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:137:2161] 2026-01-10T12:09:10.693794Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:137:2161] 2026-01-10T12:09:10.693837Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:137:2161] 2026-01-10T12:09:10.693911Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:137:2161] Type# 269877249 Reason# ActorUnknown |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:18.102716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:18.212579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:18.220953Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:18.221389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:18.221475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:18.776446Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:18.905950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:18.906120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:18.944301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.027366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:19.819224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:19.821332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.821390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.821418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.821577Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:19.896350Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:20.518997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:24.773521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:24.773633Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:24.791909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:24.833149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:24.833303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.891222Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:24.893886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.155359Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:25.155520Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:25.158781Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.159782Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.161155Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.161919Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.162102Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.162413Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.162607Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.162775Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.162954Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.181601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.439277Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:25.481216Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:25.481363Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:25.531052Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:25.532401Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:25.532641Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:25.532717Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:25.532774Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:25.532877Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:25.532959Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:25.533017Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:25.534002Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:25.554316Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:25.554453Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:25.584043Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:08:25.586736Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:08:25.661629Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:08:25.667465Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:08:25.693242Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Describe result: PathErrorUnknown 2026-01-10T12:08:25.693321Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Creating table 2026-01-10T12:08:25.693415Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:08:25.701201Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:25.706011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:25.722005Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:25.722220Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:25.738947Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:26.017074Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:08:26.089063Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:26.857340Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:27.000960Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:27.001071Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Column diff is empty, finishing 2026-01-10T12:08:27.872797Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:08:51.501086Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [2:4524:3789]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:51.501505Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2026-01-10T12:08:51.501572Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, ReplyToActorId = [2:4524:3789], StatRequests.size() = 1 2026-01-10T12:08:52.553071Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [2:4560:3806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:52.553509Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-10T12:08:52.553569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [2:4560:3806], StatRequests.size() = 1 2026-01-10T12:08:53.629009Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:4594:3818]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:53.629384Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:08:53.629430Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4594:3818], StatRequests.size() = 1 2026-01-10T12:08:54.684987Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:08:54.696758Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4630:3836]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:54.697103Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:08:54.704207Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4630:3836], StatRequests.size() = 1 2026-01-10T12:08:55.772209Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4662:3851]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-10T12:08:55.772670Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:08:55.772727Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4662:3851], StatRequests.size() = 1 2026-01-10T12:08:56.703320Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:08:56.703525Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:08:56.704348Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:08:56.704719Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:08:56.717172Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4692:3863]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:56.717558Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:08:56.717608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4692:3863], StatRequests.size() = 1 2026-01-10T12:08:57.708864Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4727:3879]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:57.709733Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:08:57.709834Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4727:3879], StatRequests.size() = 1 2026-01-10T12:08:58.733783Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4759:3891]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:08:58.734128Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:08:58.734180Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4759:3891], StatRequests.size() = 1 2026-01-10T12:09:00.125325Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:00.185147Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4800:3908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:00.185494Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:09:00.185543Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4800:3908], StatRequests.size() = 1 2026-01-10T12:09:00.305274Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:09:00.305395Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:00.305446Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:00.305501Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next 
SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:09:01.564873Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4835:3922]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:01.565252Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:09:01.565309Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4835:3922], StatRequests.size() = 1 2026-01-10T12:09:02.951112Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:02.951291Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-10T12:09:02.951865Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:09:02.952140Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:09:02.965870Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4866:3934]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:02.966209Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:09:02.966260Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4866:3934], StatRequests.size() = 1 2026-01-10T12:09:02.980523Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:02.980621Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:02.981033Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:09:03.005364Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:04.437382Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4900:3951]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:04.437737Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:09:04.437793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4900:3951], StatRequests.size() = 1 2026-01-10T12:09:05.078983Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:09:05.079069Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.611000s, at schemeshard: 72075186224037899 2026-01-10T12:09:05.079299Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-10T12:09:05.095446Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:05.761350Z node 2 :STATISTICS 
DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4934:3967]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:05.761730Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:09:05.761781Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4934:3967], StatRequests.size() = 1 2026-01-10T12:09:06.987521Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:07.021179Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4968:3981]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:07.021490Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:09:07.021530Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4968:3981], StatRequests.size() = 1 2026-01-10T12:09:08.205489Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5004:3996]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:08.205873Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:09:08.205926Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5004:3996], StatRequests.size() = 1 2026-01-10T12:09:09.293265Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:09.293450Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-10T12:09:09.294160Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:09:09.294526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:09:09.309270Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5034:4008]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:09.309631Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:09:09.309683Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5034:4008], StatRequests.size() = 1 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> DataShardReadTableSnapshots::ReadTableDropColumn >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] Leader for TabletID 9437184 is [3:172:2144] sender: [3:173:2058] recipient: [3:164:2140] Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] Leader for TabletID 9437185 is [4:188:2100] sender: 
[4:189:2049] recipient: [4:167:2097] Leader for TabletID 9437184 is [3:172:2144] sender: [3:216:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:220:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:188:2100] sender: [3:223:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:221:2049] recipient: [4:161:2096] Leader for TabletID 9437185 is [4:188:2100] sender: [4:226:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [4:228:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [3:258:2058] recipient: [3:15:2062] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2026-01-10T12:08:15.222549Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703592090836759:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:15.222678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:15.522906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:15.623203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:15.623334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:15.631523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:15.646037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:15.676202Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703592090836551:2081] 1768046895204365 != 1768046895204368 2026-01-10T12:08:15.750549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:15.807034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:15.807058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:15.807065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:15.807150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:15.968241Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:15.969466Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, 
request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:15.969494Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:15.970329Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22171, port: 22171 2026-01-10T12:08:15.971075Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:15.985196Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:16.033050Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****yucQ (232A3314) () has now valid token of ldapuser@ldap 2026-01-10T12:08:19.103513Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703611757875183:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:19.111841Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:19.269211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:19.275773Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:19.275899Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:19.337879Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:19.351848Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:19.563824Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:19.649346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.649370Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.649376Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.649452Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:19.928239Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:19.929530Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:08:19.929569Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:19.930306Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10577, port: 10577 2026-01-10T12:08:19.930408Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 
2026-01-10T12:08:19.946758Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:19.997543Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:19.998322Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:10577 return no entries 2026-01-10T12:08:19.998894Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****EuAw (1EEC95DA) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:10577 return no entries)' 2026-01-10T12:08:20.112095Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.676549Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703627878316284:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:23.681188Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:23.828277Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.869377Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593703627878316116:2081] 1768046903641846 != 1768046903641849 2026-01-10T12:08:23.869907Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:23.885178Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:23.885265Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:23.935154Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.021687Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:24.131907Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:24.131942Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:24.131951Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:24.132043Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.426687Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:08:24.436687Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 
2026-01-10T12:08:24.436724Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:08:24.437476Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26102, port: 26102 2026-01-10T12:08:24.437534Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:24.463480Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:24.508959Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:24.552533Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:08:24.553298Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:08:24.553356Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.597057Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.641544Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:24.673073Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ZJtA (5C015F5D) () has now valid token of ldapuser@ldap 2026-01-10T12:08:24.712374Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:28.666119Z node 3 :METADATA_PROVIDER ERROR: log. ... 
net)), attributes: memberOf 2026-01-10T12:08:49.297329Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:08:49.298857Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****4owg (46814F62) () has now valid token of ldapuser@ldap 2026-01-10T12:08:49.628241Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:52.601407Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****4owg (46814F62) 2026-01-10T12:08:52.601730Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:19918, port: 19918 2026-01-10T12:08:52.601798Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:08:52.665467Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:08:52.710317Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:08:52.711156Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:19918 return no entries 2026-01-10T12:08:52.711825Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****4owg (46814F62) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:19918 return no entries)' 2026-01-10T12:08:53.600577Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593703736233525101:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:53.600681Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:56.610917Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****4owg (46814F62) 2026-01-10T12:09:00.298697Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593703785769708362:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:00.298961Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:00.355598Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:00.523897Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:00.525115Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:00.525191Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:00.527267Z node 6 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593703785769708114:2081] 1768046940220010 != 1768046940220013 2026-01-10T12:09:00.600291Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:00.620169Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:00.804899Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:00.804931Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:00.804938Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:00.805054Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:00.976272Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:09:00.978018Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:09:00.978055Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:09:00.979026Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14798, port: 14798 2026-01-10T12:09:00.979127Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:09:00.999520Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:09:01.042262Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:09:01.042888Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. Server is busy 2026-01-10T12:09:01.043486Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****DSPQ (6EEAC77D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. 
Server is busy)' 2026-01-10T12:09:01.043835Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:09:01.043864Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:09:01.044946Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14798, port: 14798 2026-01-10T12:09:01.045051Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:09:01.072609Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:09:01.133007Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:09:01.136655Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. Server is busy 2026-01-10T12:09:01.137222Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****DSPQ (6EEAC77D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. Server is busy)' 2026-01-10T12:09:01.302147Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:03.308251Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****DSPQ (6EEAC77D) 2026-01-10T12:09:03.308651Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:09:03.308676Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:09:03.310086Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14798, port: 14798 2026-01-10T12:09:03.310153Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:09:03.345654Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:09:03.389067Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:09:03.390517Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. Server is busy 2026-01-10T12:09:03.391200Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****DSPQ (6EEAC77D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:14798. 
Server is busy)' 2026-01-10T12:09:05.298065Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7593703785769708362:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:05.298169Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:06.307155Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****DSPQ (6EEAC77D) 2026-01-10T12:09:06.307390Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:09:06.307409Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:09:06.307993Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14798, port: 14798 2026-01-10T12:09:06.308117Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-10T12:09:06.327820Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-10T12:09:06.376722Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-10T12:09:06.420573Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-10T12:09:06.421226Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-10T12:09:06.421284Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:09:06.468602Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:09:06.513023Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-10T12:09:06.516434Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****DSPQ (6EEAC77D) () has now valid token of ldapuser@ldap |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> DataShardReadTableSnapshots::ReadTableSplitBefore >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> 
DataShardReadTableSnapshots::ReadTableSnapshot >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::Pg [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental |85.7%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::GetAllStartPartitionSessions >> BasicUsage::PropagateSessionClosed |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> BasicUsage::RetryDiscoveryWithCancel |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> BasicUsage::WriteSessionNoAvailableDatabase |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> BasicUsage::WriteSessionCloseWaitsForWrites >> BasicUsage::FallbackToSingleDb >> BasicUsage::WriteSessionWriteInHandlers |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] |85.7%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder >> TTabletPipeTest::TestSendAfterOpen >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletResolver::NodeProblem >> TPipeCacheTest::TestIdleRefresh ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:122:2149] sender: [1:125:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:164:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:122:2149] sender: [1:167:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode |85.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor |85.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletResolver::NodeProblem [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TPipeCacheTest::TestTabletNode [GOOD] >> BasicStatistics::TwoTables [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation >> IncrementalBackup::SimpleBackupRestoreWithIndex [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> TExportToS3Tests::AuditCompletedExport [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> TExportToS3Tests::AuditCancelledExport |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2026-01-10T12:09:18.962304Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.962510Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:216:2139] followers: 0 2026-01-10T12:09:18.962592Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:216:2139] 2026-01-10T12:09:18.962869Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.963105Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:222:2143] followers: 0 2026-01-10T12:09:18.963166Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:222:2143] 2026-01-10T12:09:18.964735Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:216:2139] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.964794Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 
disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:216:2139] 2026-01-10T12:09:18.964992Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:222:2143] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.965039Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:222:2143] 2026-01-10T12:09:18.965280Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 5 2026-01-10T12:09:18.965340Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:216:2139] by nodeId 2026-01-10T12:09:18.965407Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:216:2139] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.965450Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:09:18.965685Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:232:2096] followers: 0 2026-01-10T12:09:18.965764Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:232:2096] 2026-01-10T12:09:18.966158Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:222:2143] by nodeId 2026-01-10T12:09:18.966213Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:222:2143] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.966279Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:09:18.966503Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:238:2098] followers: 0 2026-01-10T12:09:18.966568Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:238:2098] 2026-01-10T12:09:18.968609Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2026-01-10T12:09:18.968680Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:232:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.968728Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:232:2096] 2026-01-10T12:09:18.968962Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:238:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.969012Z node 1 :TABLET_RESOLVER 
DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:238:2098] 2026-01-10T12:09:18.969248Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2026-01-10T12:09:18.969318Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [2:232:2096] by nodeId 2026-01-10T12:09:18.969373Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:232:2096] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.969424Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:09:18.969637Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:250:2096] followers: 0 2026-01-10T12:09:18.969702Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:250:2096] 2026-01-10T12:09:18.970180Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:238:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.970229Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:238:2098] 2026-01-10T12:09:18.970431Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 8 2026-01-10T12:09:18.970492Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:250:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.970535Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:250:2096] 2026-01-10T12:09:18.970788Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:238:2098] by nodeId 2026-01-10T12:09:18.970844Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:238:2098] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:09:18.970885Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:09:18.971130Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:256:2098] followers: 0 2026-01-10T12:09:18.971190Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:256:2098] >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> 
DataShardReadTableSnapshots::ReadTableSplitFinished |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:21.238591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:21.384301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:21.393695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:21.394173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:21.394299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:21.869580Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:22.020443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:22.020629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:22.063166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:22.180438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:23.301056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:23.301658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:23.301726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:23.301764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:23.302348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:23.384745Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:24.189611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:29.499949Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:29.509896Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:29.514313Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:29.608392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:29.608554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.669636Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:29.677784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.979629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.980503Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.981321Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.982153Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.982319Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.982653Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.982844Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.982968Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:30.003885Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:30.181381Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:08:30.242569Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:30.242740Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:30.262520Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:30.554189Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:30.628447Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:30.628557Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:30.733121Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:30.734910Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:30.735222Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:30.735292Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:30.735348Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:30.735419Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:30.735498Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:30.735556Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:30.737755Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:30.917519Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:30.917640Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:30.953440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:08:30.953836Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:08:31.217600Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:08:31.218135Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:08:31.222398Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:08:31.222486Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:08:31.222578Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:08:31.226787Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:31.232926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:31.241242Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:31.241387Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:31.256928Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:31.586196Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:31.908194Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:32.050591Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:32.050719Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:08:32.832441Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... lyToActorId = [2:3843:3398], StatRequests.size() = 1 2026-01-10T12:09:01.241172Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3882:3416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:01.241509Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:09:01.241559Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3882:3416], StatRequests.size() = 1 2026-01-10T12:09:02.445045Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3909:3426]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:02.445408Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:09:02.445457Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3909:3426], StatRequests.size() = 1 2026-01-10T12:09:03.191156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:03.691657Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3940:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:03.692052Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:09:03.692130Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3940:3440], StatRequests.size() = 1 2026-01-10T12:09:04.903885Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3967:3450]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:04.907288Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:09:04.907381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3967:3450], StatRequests.size() = 1 2026-01-10T12:09:05.510086Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:05.510375Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:09:05.510808Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:09:05.511005Z node 2 :STATISTICS DEBUG: 
aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:09:05.969137Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3994:3460]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:05.969515Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:09:05.969566Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3994:3460], StatRequests.size() = 1 2026-01-10T12:09:07.046804Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4023:3472]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:07.047165Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:09:07.047222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4023:3472], StatRequests.size() = 1 2026-01-10T12:09:08.126630Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4056:3484]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:08.127027Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:09:08.127078Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4056:3484], StatRequests.size() = 1 2026-01-10T12:09:08.935140Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:09:08.935238Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:08.935287Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:08.935342Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:09:09.036462Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:09.534969Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4090:3497]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:09.535308Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:09:09.535360Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4090:3497], StatRequests.size() = 1 2026-01-10T12:09:10.661232Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4121:3509]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:10.661555Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:09:10.661606Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4121:3509], StatRequests.size() = 1 2026-01-10T12:09:11.488808Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:11.489010Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-10T12:09:11.489527Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:09:11.489749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:09:11.608370Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:11.608468Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:11.608820Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-10T12:09:11.633131Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:12.174317Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4150:3521]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:12.174680Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:09:12.174736Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4150:3521], StatRequests.size() = 1 2026-01-10T12:09:13.424727Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4179:3533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:13.425042Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:09:13.425089Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4179:3533], StatRequests.size() = 1 2026-01-10T12:09:14.549154Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4206:3543]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:14.549531Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:09:14.549585Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4206:3543], StatRequests.size() = 1 2026-01-10T12:09:15.328929Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:15.849123Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4241:3557]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:15.849469Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:09:15.849521Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4241:3557], StatRequests.size() = 1 2026-01-10T12:09:16.993451Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4268:3567]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:16.993839Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-10T12:09:16.993892Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), 
request id = 34, ReplyToActorId = [2:4268:3567], StatRequests.size() = 1 2026-01-10T12:09:17.695200Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:09:17.695795Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:09:17.696207Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:09:17.696406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:09:17.776123Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:17.776225Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:17.776537Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-10T12:09:17.796384Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:18.217218Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4299:3581]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:18.217523Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-10T12:09:18.217566Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4299:3581], StatRequests.size() = 1 2026-01-10T12:09:18.217959Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 36 ], ReplyToActorId[ [2:4301:3583]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:18.221908Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 36 ] 2026-01-10T12:09:18.221987Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 36, ReplyToActorId = [2:4301:3583], StatRequests.size() = 1 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TResourceBroker::TestQueueWithConfigure >> TResourceBroker::TestErrors >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TResourceBroker::TestCounters >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TExportToS3Tests::AutoDropping >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TResourceBroker::TestCounters [GOOD] >> 
TResourceBroker::TestChangeTaskType >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2026-01-10T12:09:23.269562Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2026-01-10T12:09:23.720121Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2026-01-10T12:09:23.720345Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2026-01-10T12:09:23.720628Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-01-10T12:06:26.568229Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1768046786568195 2026-01-10T12:06:27.089786Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703129084660046:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:27.089856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:27.348159Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:27.393779Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703128411487107:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:27.393854Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:06:27.457535Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:27.879532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:27.888767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.060583Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.173572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.173638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:28.248228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.301731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:28.301861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:28.314420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:28.314507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:28.405653Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:06:28.405889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.412453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:28.507097Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:28.563189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:28.595261Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.681686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:28.798414Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011301s 2026-01-10T12:06:29.148801Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00303c/r3tmp/yandexH6DJvX.tmp 2026-01-10T12:06:29.148825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00303c/r3tmp/yandexH6DJvX.tmp 2026-01-10T12:06:29.148995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00303c/r3tmp/yandexH6DJvX.tmp 2026-01-10T12:06:29.149107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:29.367273Z INFO: TTestServer started on Port 9152 GrpcPort 22607 PQClient connected to localhost:22607 2026-01-10T12:06:29.885954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:29.937231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:06:32.092262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703129084660046:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.092363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:32.396727Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703128411487107:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:32.396808Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:35.135646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703163444399772:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.135765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.136596Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703162771225748:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.136681Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.136979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703163444399789:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.137054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703163444399788:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.137098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.137509Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703162771225769:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.137565Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.138928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703162771225767:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.143815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:35.260689Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703162771225772:2139] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:06:35.268982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703163444399792:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:06:35.373612Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703163444399885:2994] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:35.899401Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703163444399895:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:35.907783Z node 2 :KQP_COMPILE_ACTOR ERROR: kq ... ingCommit(TMessage) 2026-01-10T12:09:20.563219Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:09:20.563230Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:09:20.563246Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:20.563284Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-10T12:09:20.563344Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:09:20.561434Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2026-01-10T12:09:20.564565Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:09:20.564599Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:09:20.564691Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:09:20.565199Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b98b2e1b-3da60c78-802878e0-3878e357_0 2026-01-10T12:09:20.568311Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1768046960568 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:20.568464Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b98b2e1b-3da60c78-802878e0-3878e357_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-10T12:09:20.569140Z :INFO: [] MessageGroupId [src] SessionId [src|b98b2e1b-3da60c78-802878e0-3878e357_0] Write session: close. 
Timeout = 0 ms 2026-01-10T12:09:20.569216Z :INFO: [] MessageGroupId [src] SessionId [src|b98b2e1b-3da60c78-802878e0-3878e357_0] Write session will now close 2026-01-10T12:09:20.569272Z :DEBUG: [] MessageGroupId [src] SessionId [src|b98b2e1b-3da60c78-802878e0-3878e357_0] Write session: aborting 2026-01-10T12:09:20.569847Z :INFO: [] MessageGroupId [src] SessionId [src|b98b2e1b-3da60c78-802878e0-3878e357_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:09:20.569901Z :DEBUG: [] MessageGroupId [src] SessionId [src|b98b2e1b-3da60c78-802878e0-3878e357_0] Write session: destroy 2026-01-10T12:09:20.572772Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|b98b2e1b-3da60c78-802878e0-3878e357_0 grpc read done: success: 0 data: 2026-01-10T12:09:20.572806Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|b98b2e1b-3da60c78-802878e0-3878e357_0 grpc read failed 2026-01-10T12:09:20.572849Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|b98b2e1b-3da60c78-802878e0-3878e357_0 grpc closed 2026-01-10T12:09:20.572872Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|b98b2e1b-3da60c78-802878e0-3878e357_0 is DEAD 2026-01-10T12:09:20.573872Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:09:20.576615Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [15:7593703873866570193:2473] destroyed 2026-01-10T12:09:20.576687Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:09:20.576728Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:20.576751Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.576770Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:20.576794Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.576811Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:20.628317Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:20.628377Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.628400Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:20.628430Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.628450Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:20.636424Z :INFO: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Starting read session 2026-01-10T12:09:20.636482Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Starting cluster discovery 2026-01-10T12:09:20.636782Z :INFO: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22948
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22948. " 2026-01-10T12:09:20.636824Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Restart cluster discovery in 0.008713s 2026-01-10T12:09:20.646313Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Starting cluster discovery 2026-01-10T12:09:20.646745Z :INFO: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22948
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22948. " 2026-01-10T12:09:20.646810Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Restart cluster discovery in 0.015086s 2026-01-10T12:09:20.662342Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Starting cluster discovery 2026-01-10T12:09:20.662553Z :INFO: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22948
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22948. " 2026-01-10T12:09:20.662610Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Restart cluster discovery in 0.021922s 2026-01-10T12:09:20.686186Z :DEBUG: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Starting cluster discovery 2026-01-10T12:09:20.686559Z :NOTICE: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22948
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22948. " } 2026-01-10T12:09:20.687744Z :NOTICE: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22948
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22948. " } 2026-01-10T12:09:20.687892Z :INFO: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Closing read session. Close timeout: 0.000000s 2026-01-10T12:09:20.688011Z :NOTICE: [/Root] [/Root] [e38f8868-a92af39c-7566d80e-4dea3519] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:09:20.732352Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:20.732401Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.732423Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:20.732450Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.732469Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:20.836648Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:20.836699Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.836719Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:20.836742Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.836759Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:20.939036Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:20.939083Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.939103Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:20.939126Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:20.939143Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TTabletPipeTest::TestOpen |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestOpen [GOOD] |85.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> TExportToS3Tests::AutoDropping [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries >> IncrementalBackup::E2EMultipleBackupRestoreCycles >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless [GOOD] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen >> TabletState::NormalLifecycle >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> TResourceBrokerInstant::TestMerge >> TResourceBrokerInstant::Test >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... waiting for connect2 >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> TabletState::NormalLifecycle [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletCountersAggregator::ColumnShardCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for boot2 ... 
waiting for connect2 >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> TResourceBrokerInstant::TestErrors |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicStatistics::DedicatedTimeIntervals [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> BasicUsage::PreferredDatabaseNoFallback |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> ConfiguredTabletBootstrapperTest::BasicInitialization >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> TTabletPipeTest::TestShutdown >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> THealthCheckTest::TestStateStorageRed [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:17.738661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:17.887655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:17.901344Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:17.901917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.902043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:18.387653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:18.387839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:18.394940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046953580590 != 1768046953580593 2026-01-10T12:09:18.414267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:18.473419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:18.603243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:18.941689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:18.957164Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:19.071844Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:19.071992Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:19.073832Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:09:19.194493Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:19.194613Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:19.195300Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:19.195430Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:19.195840Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:19.196021Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:19.196593Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:19.198775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:19.199340Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:19.200241Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:19.200329Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:19.242564Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:19.243776Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:19.244120Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:09:19.244424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:19.304573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:19.305455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:19.305578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:19.307439Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:19.307600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:19.307682Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:19.308632Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:19.308814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:19.308919Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:09:19.320739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:19.369452Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:19.369711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:19.369884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:09:19.369969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:19.370017Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:19.370061Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:19.370378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.370456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.370896Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:19.371046Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:19.371166Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:19.371212Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:19.371260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:19.371314Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:19.371352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:19.371387Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:19.371422Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:19.371514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.371569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.371626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:09:19.371729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:09:19.371780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:19.371894Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:19.372181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:19.372249Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:19.372426Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... on unit ReadTableScan 2026-01-10T12:09:26.983866Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-10T12:09:26.984046Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2026-01-10T12:09:26.984069Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:26.984106Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-10T12:09:26.984129Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:26.984152Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:26.984196Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:26.984522Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1073:2890], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:26.984563Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:26.984668Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-10T12:09:26.984818Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.984847Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-10T12:09:26.984899Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.985026Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.985098Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-10T12:09:26.985160Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 
2026-01-10T12:09:26.985196Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-10T12:09:26.985450Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.985472Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-10T12:09:26.985503Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.985552Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.985632Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-10T12:09:26.985666Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-10T12:09:26.985689Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-10T12:09:26.985845Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.985862Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-10T12:09:26.985886Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.985923Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.985952Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-10T12:09:26.985980Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-10T12:09:26.986002Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-10T12:09:26.986155Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.986176Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-10T12:09:26.986202Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.986241Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 
2026-01-10T12:09:26.986349Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-10T12:09:26.986385Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-10T12:09:26.986443Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:09:26.986475Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037888 2026-01-10T12:09:26.986613Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.986654Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.986708Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:26.986749Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:26.986787Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2026-01-10T12:09:26.986823Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-10T12:09:26.986878Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2026-01-10T12:09:26.986970Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-10T12:09:26.987040Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2026-01-10T12:09:26.987084Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:09:26.987139Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-10T12:09:26.987194Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2026-01-10T12:09:26.987233Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:09:26.987282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:09:26.987335Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:09:26.987395Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-10T12:09:26.987425Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 
2026-01-10T12:09:26.987455Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2026-01-10T12:09:26.987499Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:26.987538Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-10T12:09:26.987576Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:26.987614Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:26.987703Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:26.987768Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-10T12:09:26.987875Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:09:26.987976Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:26.988201Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2026-01-10T12:09:26.988274Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014180s execute time: 0.102997s total time: 0.117177s 2026-01-10T12:09:26.988535Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] Test command err: 2026-01-10T12:09:20.245679Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [2:8:2055] 2026-01-10T12:09:20.245942Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:9:2056] worker 0 2026-01-10T12:09:20.245992Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:10:2057] worker 1 2026-01-10T12:09:20.246021Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:11:2058] worker 2 2026-01-10T12:09:20.246050Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:12:2059] worker 3 2026-01-10T12:09:20.246086Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:13:2060] worker 4 2026-01-10T12:09:20.246119Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new 
request V2 Initiator [2:8:2055] self [2:14:2061] worker 5 2026-01-10T12:09:20.246149Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:15:2062] worker 6 2026-01-10T12:09:20.246198Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:16:2063] worker 7 2026-01-10T12:09:20.246245Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:17:2064] worker 8 2026-01-10T12:09:20.246272Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:18:2065] worker 9 Sending message to [2:10:2057] from [2:8:2055] id 1 Sending message to [2:11:2058] from [2:8:2055] id 2 Sending message to [2:12:2059] from [2:8:2055] id 3 Sending message to [2:13:2060] from [2:8:2055] id 4 Sending message to [2:14:2061] from [2:8:2055] id 5 Sending message to [2:15:2062] from [2:8:2055] id 6 Sending message to [2:16:2063] from [2:8:2055] id 7 Sending message to [2:17:2064] from [2:8:2055] id 8 Sending message to [2:18:2065] from [2:8:2055] id 9 Sending message to [2:9:2056] from [2:8:2055] id 10 2026-01-10T12:09:21.753445Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [2:13:2060] 2026-01-10T12:09:21.753544Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [2:14:2061] 2026-01-10T12:09:21.753624Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [2:15:2062] 2026-01-10T12:09:21.753675Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [2:16:2063] 2026-01-10T12:09:21.753731Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [2:17:2064] 2026-01-10T12:09:21.753790Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [2:18:2065] 2026-01-10T12:09:21.754095Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [2:9:2056] 2026-01-10T12:09:21.754173Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [2:10:2057] 2026-01-10T12:09:21.754212Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [2:11:2058] 2026-01-10T12:09:21.754273Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [2:12:2059] 2026-01-10T12:09:21.754330Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:14:2061] 2026-01-10T12:09:21.756117Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:14:2061] 2026-01-10T12:09:21.793363Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:14:2061] Initiator [2:8:2055] 2026-01-10T12:09:21.826732Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:15:2062] 2026-01-10T12:09:21.828354Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:15:2062] 2026-01-10T12:09:21.862838Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator 
request processed [2:15:2062] Initiator [2:8:2055] 2026-01-10T12:09:21.908507Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:16:2063] 2026-01-10T12:09:21.910033Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:16:2063] 2026-01-10T12:09:21.941885Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:16:2063] Initiator [2:8:2055] 2026-01-10T12:09:21.968957Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:17:2064] 2026-01-10T12:09:21.970793Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:17:2064] 2026-01-10T12:09:22.006015Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:17:2064] Initiator [2:8:2055] 2026-01-10T12:09:22.047071Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:18:2065] 2026-01-10T12:09:22.048927Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:18:2065] 2026-01-10T12:09:22.084122Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:18:2065] Initiator [2:8:2055] 2026-01-10T12:09:22.111738Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:8:2055] 2026-01-10T12:09:22.111905Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:8:2055] 2026-01-10T12:09:22.117563Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [2:9:2056] 2026-01-10T12:09:22.119376Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [2:9:2056] 2026-01-10T12:09:22.164599Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:9:2056] Initiator [2:8:2055] 2026-01-10T12:09:22.190676Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:10:2057] 2026-01-10T12:09:22.192427Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:10:2057] 2026-01-10T12:09:22.237491Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:10:2057] Initiator [2:8:2055] 2026-01-10T12:09:22.270328Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:11:2058] 2026-01-10T12:09:22.272159Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:11:2058] 2026-01-10T12:09:22.307540Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:11:2058] Initiator [2:8:2055] 2026-01-10T12:09:22.333449Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:12:2059] 2026-01-10T12:09:22.335164Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:12:2059] 2026-01-10T12:09:22.373430Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:12:2059] Initiator [2:8:2055] 2026-01-10T12:09:22.399507Z node 
2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:13:2060] 2026-01-10T12:09:22.401485Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:13:2060] 2026-01-10T12:09:22.436333Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:13:2060] Initiator [2:8:2055] 2026-01-10T12:09:22.463374Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:8:2055] 2026-01-10T12:09:22.463546Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:8:2055] 2026-01-10T12:09:22.470227Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:8:2055] 2026-01-10T12:09:22.470399Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:8:2055] 2026-01-10T12:09:22.476055Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:8:2055] 2026-01-10T12:09:22.476257Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:8:2055] 2026-01-10T12:09:22.483323Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:8:2055] 2026-01-10T12:09:22.483490Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:8:2055] 2026-01-10T12:09:22.489567Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [2:8:2055] 2026-01-10T12:09:22.489732Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [2:8:2055] 2026-01-10T12:09:22.495878Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:8:2055] 2026-01-10T12:09:22.496048Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:8:2055] 2026-01-10T12:09:22.504656Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:8:2055] 2026-01-10T12:09:22.504882Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:8:2055] 2026-01-10T12:09:22.510800Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:8:2055] 2026-01-10T12:09:22.510956Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:8:2055] 2026-01-10T12:09:22.517083Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:8:2055] 2026-01-10T12:09:22.517239Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:8:2055] 2026-01-10T12:09:22.523140Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:8:2055] Initiator [2:7:2054] TEST 2 10 duration 2.447384s 2026-01-10T12:09:22.783548Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [3:8:2055] 2026-01-10T12:09:22.784049Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:9:2056] worker 0 
2026-01-10T12:09:22.784134Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:10:2057] worker 1 2026-01-10T12:09:22.784175Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_ag ... or got response node 8 [3:8:2055] 2026-01-10T12:09:24.242426Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [3:8:2055] 2026-01-10T12:09:24.247720Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [3:8:2055] 2026-01-10T12:09:24.247873Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [3:8:2055] 2026-01-10T12:09:24.253307Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [3:8:2055] 2026-01-10T12:09:24.253466Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [3:8:2055] 2026-01-10T12:09:24.258832Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [3:8:2055] Initiator [3:7:2054] TEST 2 20 duration 1.654136s 2026-01-10T12:09:24.536918Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [4:8:2055] 2026-01-10T12:09:24.537057Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [4:8:2055] self [4:9:2056] worker 0 Sending message to [4:9:2056] from [4:8:2055] id 1 Sending message to [4:9:2056] from [4:8:2055] id 2 Sending message to [4:9:2056] from [4:8:2055] id 3 Sending message to [4:9:2056] from [4:8:2055] id 4 Sending message to [4:9:2056] from [4:8:2055] id 5 Sending message to [4:9:2056] from [4:8:2055] id 6 Sending message to [4:9:2056] from [4:8:2055] id 7 Sending message to [4:9:2056] from [4:8:2055] id 8 Sending message to [4:9:2056] from [4:8:2055] id 9 Sending message to [4:9:2056] from [4:8:2055] id 10 2026-01-10T12:09:25.449962Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [4:9:2056] 2026-01-10T12:09:25.450024Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [4:9:2056] 2026-01-10T12:09:25.450050Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [4:9:2056] 2026-01-10T12:09:25.450085Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [4:9:2056] 2026-01-10T12:09:25.450179Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [4:9:2056] 2026-01-10T12:09:25.450217Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [4:9:2056] 2026-01-10T12:09:25.450251Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [4:9:2056] 2026-01-10T12:09:25.450282Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [4:9:2056] 2026-01-10T12:09:25.450319Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [4:9:2056] 2026-01-10T12:09:25.450356Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [4:9:2056] 2026-01-10T12:09:25.450626Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: 
aggregator actor got response node 1 [4:9:2056] 2026-01-10T12:09:25.452261Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [4:9:2056] 2026-01-10T12:09:25.486160Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [4:9:2056] 2026-01-10T12:09:25.487744Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [4:9:2056] 2026-01-10T12:09:25.523654Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [4:9:2056] 2026-01-10T12:09:25.525385Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [4:9:2056] 2026-01-10T12:09:25.561774Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [4:9:2056] 2026-01-10T12:09:25.563499Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [4:9:2056] 2026-01-10T12:09:25.599084Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [4:9:2056] 2026-01-10T12:09:25.600877Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [4:9:2056] 2026-01-10T12:09:25.639573Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [4:9:2056] 2026-01-10T12:09:25.641344Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [4:9:2056] 2026-01-10T12:09:25.674370Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [4:9:2056] 2026-01-10T12:09:25.676173Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [4:9:2056] 2026-01-10T12:09:25.708680Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [4:9:2056] 2026-01-10T12:09:25.710300Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [4:9:2056] 2026-01-10T12:09:25.735951Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [4:9:2056] 2026-01-10T12:09:25.737224Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [4:9:2056] 2026-01-10T12:09:25.763417Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [4:9:2056] 2026-01-10T12:09:25.764509Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [4:9:2056] 2026-01-10T12:09:25.807668Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:9:2056] Initiator [4:8:2055] 2026-01-10T12:09:26.121846Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [4:8:2055] 2026-01-10T12:09:26.122877Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [4:8:2055] 2026-01-10T12:09:26.175196Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:8:2055] Initiator [4:7:2054] TEST 2 1 duration 1.858373s 2026-01-10T12:09:26.440685Z node 5 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [5:7:2054] self [5:8:2055] worker 0 Sending message to [5:8:2055] from [5:8:2055] id 1 Sending message to [5:8:2055] from [5:8:2055] id 2 Sending message to [5:8:2055] from [5:8:2055] id 3 Sending message to [5:8:2055] from [5:8:2055] id 4 Sending message to [5:8:2055] from [5:8:2055] id 5 Sending message to [5:8:2055] from [5:8:2055] id 6 Sending message to [5:8:2055] from [5:8:2055] id 7 Sending message to [5:8:2055] from [5:8:2055] id 8 Sending message to [5:8:2055] from [5:8:2055] id 9 Sending message to [5:8:2055] from [5:8:2055] id 10 2026-01-10T12:09:27.275727Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [5:8:2055] 2026-01-10T12:09:27.275787Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [5:8:2055] 2026-01-10T12:09:27.275810Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [5:8:2055] 2026-01-10T12:09:27.275837Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [5:8:2055] 2026-01-10T12:09:27.275866Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [5:8:2055] 2026-01-10T12:09:27.275935Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [5:8:2055] 2026-01-10T12:09:27.275965Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [5:8:2055] 2026-01-10T12:09:27.275994Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [5:8:2055] 2026-01-10T12:09:27.276019Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [5:8:2055] 2026-01-10T12:09:27.276055Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [5:8:2055] 2026-01-10T12:09:27.276285Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [5:8:2055] 2026-01-10T12:09:27.277929Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [5:8:2055] 2026-01-10T12:09:27.309363Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [5:8:2055] 2026-01-10T12:09:27.310976Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [5:8:2055] 2026-01-10T12:09:27.342858Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [5:8:2055] 2026-01-10T12:09:27.348348Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [5:8:2055] 2026-01-10T12:09:27.386175Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [5:8:2055] 2026-01-10T12:09:27.388216Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [5:8:2055] 2026-01-10T12:09:27.423453Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [5:8:2055] 2026-01-10T12:09:27.425120Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [5:8:2055] 2026-01-10T12:09:27.473646Z node 5 
:TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [5:8:2055] 2026-01-10T12:09:27.475474Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [5:8:2055] 2026-01-10T12:09:27.509322Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [5:8:2055] 2026-01-10T12:09:27.511394Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [5:8:2055] 2026-01-10T12:09:27.547687Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [5:8:2055] 2026-01-10T12:09:27.549166Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [5:8:2055] 2026-01-10T12:09:27.576809Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [5:8:2055] 2026-01-10T12:09:27.578641Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [5:8:2055] 2026-01-10T12:09:27.606754Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [5:8:2055] 2026-01-10T12:09:27.608469Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [5:8:2055] 2026-01-10T12:09:27.667614Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [5:8:2055] Initiator [5:7:2054] TEST 2 1 duration 1.701920s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:17.636695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:17.782465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:17.792752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:17.793274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.793418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:18.222953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:18.223146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:18.230758Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046953792309 != 1768046953792312 2026-01-10T12:09:18.248392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:18.301555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:18.419251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:18.779621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:18.796432Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:18.909207Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:18.909288Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:18.909473Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:09:19.054821Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:19.054924Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:19.055611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:19.055711Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:19.056087Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:19.056267Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:19.056411Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:19.058427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:19.058920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:19.059704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:19.059784Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:19.093241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:19.094368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:19.094697Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:09:19.094972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:19.160980Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:19.161945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:19.162071Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:19.164010Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:19.164116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:19.164210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:19.164634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:19.164777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:19.164863Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:09:19.175718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:19.278061Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:19.278333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:19.278469Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:09:19.278513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:19.278557Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:19.278596Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:19.278874Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.278929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.279375Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:19.279487Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:19.279592Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:19.279672Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:19.279739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:19.279789Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:19.279833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:19.279881Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:19.279941Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:19.280066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.284227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.284301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:09:19.284449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:09:19.284504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:19.284664Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:19.284970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:19.285049Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:19.285183Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... ead_table_scan.cpp:662: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:27.211294Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1545:3255], Recipient [2:1277:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2026-01-10T12:09:27.211320Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1277:3043] TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2026-01-10T12:09:27.211343Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1277:3043] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1545:3255] ShardId# 72075186224037896 2026-01-10T12:09:27.211408Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2026-01-10T12:09:27.211475Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1545:3255], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2026-01-10T12:09:27.211495Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1277:3043] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2026-01-10T12:09:27.211760Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1276:3043], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-10T12:09:27.211790Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1277:3043] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:27.211811Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1277:3043] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2026-01-10T12:09:27.211847Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-10T12:09:27.211900Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-10T12:09:27.212026Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: 
StateReadTable, received event# 269287429, Sender [2:1545:3255], Recipient [2:1277:3043]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2026-01-10T12:09:27.212050Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1277:3043] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2026-01-10T12:09:27.212071Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1277:3043] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2026-01-10T12:09:27.212140Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037896 2026-01-10T12:09:27.212160Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037896 2026-01-10T12:09:27.212246Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1446:3176], Recipient [2:1446:3176]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:27.212273Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:27.212307Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037896 2026-01-10T12:09:27.212330Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:27.212356Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2026-01-10T12:09:27.212392Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2026-01-10T12:09:27.212419Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2026-01-10T12:09:27.212447Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-10T12:09:27.212469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2026-01-10T12:09:27.212489Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2026-01-10T12:09:27.212512Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-10T12:09:27.212538Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2026-01-10T12:09:27.212558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2026-01-10T12:09:27.212578Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2026-01-10T12:09:27.212598Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2026-01-10T12:09:27.212629Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-10T12:09:27.212649Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2026-01-10T12:09:27.212664Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2026-01-10T12:09:27.212694Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:27.212719Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037896 2026-01-10T12:09:27.212738Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037896 has no attached operations 2026-01-10T12:09:27.212757Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037896 2026-01-10T12:09:27.212795Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037896 2026-01-10T12:09:27.212818Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-10T12:09:27.212858Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:09:27.212907Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2026-01-10T12:09:27.213110Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1446:3176], Recipient [2:1277:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 348 } } CommitVersion { Step: 0 TxId: 281474976715664 } 2026-01-10T12:09:27.213136Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1277:3043] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2026-01-10T12:09:27.213185Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1277:3043] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017163s execute time: 0.581048s total time: 0.598211s 2026-01-10T12:09:27.213511Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1083:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.213609Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1190:2977]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.213912Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1194:2979]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.214111Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender 
[2:1277:3043], Recipient [2:1441:3174]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.214284Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1548:3258], Recipient [2:1335:3092]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:27.214310Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:27.214358Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037893, clientId# [2:1546:3256], serverId# [2:1548:3258], sessionId# [0:0:0] 2026-01-10T12:09:27.214512Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1335:3092]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.214608Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1549:3259], Recipient [2:1337:3094]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:27.214627Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:27.214648Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1547:3257], serverId# [2:1549:3259], sessionId# [0:0:0] 2026-01-10T12:09:27.214689Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1446:3176]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-10T12:09:27.214800Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3043], Recipient [2:1337:3094]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 |85.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2026-01-10T12:09:28.331739Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> BasicUsage::WriteSessionCloseIgnoresWrites >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> ConfiguredTabletBootstrapperTest::BasicInitialization [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> IncrementalBackup::ResetVsUpsertColumnStateSerialization >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> TTabletPipeTest::TestShutdown [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] >> TTabletPipeTest::TestTwoNodes >> TTabletPipeTest::TestTwoNodes [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:16.458482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:16.673103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:16.683047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:16.683602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:16.683721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:17.223203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.223361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.234580Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046952872030 != 1768046952872033 2026-01-10T12:09:17.276862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:17.337502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.459074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:17.818531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:17.843902Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.954253Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:17.954390Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:17.954518Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:09:18.086309Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:18.086430Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:18.087144Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:18.087280Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:18.087690Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:18.087862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:18.088044Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:18.090481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:18.091051Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:18.091912Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:18.092025Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:18.139523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:18.140781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:18.141156Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:09:18.141468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:18.198265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:18.199138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:18.199262Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:18.201546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:18.201638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:18.201717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:18.202160Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:18.202315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:18.202430Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:09:18.216790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:18.346525Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:18.346767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:18.346948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:09:18.346989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:18.347041Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:18.347082Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:18.347353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:18.347410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:18.347787Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:18.347911Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:18.348016Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:18.348066Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:18.352611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:18.352687Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:18.352728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:18.352772Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:18.352848Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:18.353030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:18.353076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:18.353128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:09:18.353257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:09:18.353309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:18.353459Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:18.353718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:18.353779Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:18.353901Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:09:26.545134Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1195:2985], Recipient [2:1085:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:26.545164Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:26.545228Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1195:2985], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-10T12:09:26.545250Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-10T12:09:26.545276Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:1195:2985] 2026-01-10T12:09:26.545357Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-10T12:09:26.545626Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.545721Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1195:2985], Recipient [2:1169:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-10T12:09:26.545746Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-10T12:09:26.545767Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1195:2985] ShardId# 72075186224037890 2026-01-10T12:09:26.545804Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-10T12:09:26.545849Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1195:2985], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-10T12:09:26.545869Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-10T12:09:26.546068Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1168:2961], Recipient [2:1169:2961]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-10T12:09:26.546091Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1169:2961] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.546111Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1195:2985] 2026-01-10T12:09:26.546149Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-10T12:09:26.546195Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.546283Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1195:2985], Recipient [2:1169:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2026-01-10T12:09:26.546304Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-10T12:09:26.546323Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1195:2985] ShardId# 72075186224037890 2026-01-10T12:09:26.546375Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1169:2961] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.016309s execute time: 0.146464s total time: 0.162773s 2026-01-10T12:09:26.546516Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-10T12:09:26.546554Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2026-01-10T12:09:26.546844Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2961], Recipient [2:1083:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 2026-01-10T12:09:26.547008Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:09:26.547032Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037890 2026-01-10T12:09:26.547162Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1085:2897], Recipient [2:1085:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.547190Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.547224Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:09:26.547247Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:26.547276Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2026-01-10T12:09:26.547297Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:09:26.547328Z node 2 :TX_DATASHARD TRACE: 
read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2026-01-10T12:09:26.547366Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-10T12:09:26.547389Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2026-01-10T12:09:26.547409Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2026-01-10T12:09:26.547429Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-10T12:09:26.547454Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2026-01-10T12:09:26.547475Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2026-01-10T12:09:26.547495Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:09:26.547517Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:09:26.547549Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-10T12:09:26.547569Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:09:26.547586Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2026-01-10T12:09:26.547607Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:26.547635Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-10T12:09:26.547663Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-10T12:09:26.547687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:09:26.547727Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:09:26.547748Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-10T12:09:26.547774Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:09:26.547820Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:09:26.548020Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549569, Sender [2:1169:2961], Recipient [2:1085:2897]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 
2026-01-10T12:09:26.548055Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2026-01-10T12:09:26.548128Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2026-01-10T12:09:26.548172Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2026-01-10T12:09:26.548333Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287431, Sender [2:1169:2961], Recipient [2:1085:2897]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2026-01-10T12:09:26.548364Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2026-01-10T12:09:26.548453Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2961], Recipient [2:1085:2897]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:17.420201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:17.570939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:17.581791Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:17.582323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.582455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:18.001270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:18.001418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:18.008053Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046953936188 != 1768046953936191 2026-01-10T12:09:18.032854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:18.090073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:18.203230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:18.546881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:18.564754Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:18.688454Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:18.688557Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:18.688775Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:09:18.829666Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:18.829777Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:18.830402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:18.830498Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:18.830841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:18.830989Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:18.831158Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:18.833319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:18.833870Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:18.834682Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:18.834763Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:18.883203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:18.885790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:18.886156Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:09:18.886419Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:18.936582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:18.937484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:18.937605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:18.939375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:18.939455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:18.939526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:18.939938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:18.940109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:18.940205Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:09:18.952683Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:19.012688Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:19.012946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:19.013102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:09:19.013143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:19.013186Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:19.013221Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:19.013488Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.013545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:19.013927Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:19.014021Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:19.014107Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:19.014183Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:19.014249Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:19.014307Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:19.014345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:19.014380Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:19.014429Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:19.014541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.014589Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:19.014634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:09:19.014734Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:09:19.014773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:19.014875Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:19.015194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:19.015250Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:19.015371Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... sactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2026-01-10T12:09:26.695742Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-10T12:09:26.695776Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1196:2985] ShardId# 72075186224037890 2026-01-10T12:09:26.695842Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-10T12:09:26.695915Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2985], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-10T12:09:26.695944Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-10T12:09:26.696533Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2879], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-10T12:09:26.696575Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.696604Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-10T12:09:26.696652Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.696747Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:09:26.696909Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2985], Recipient [2:1061:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: 
"\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-10T12:09:26.696941Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-10T12:09:26.696970Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1196:2985] ShardId# 72075186224037890 2026-01-10T12:09:26.697055Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2985], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-10T12:09:26.697082Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-10T12:09:26.697121Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-10T12:09:26.697477Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2879], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-10T12:09:26.697515Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:26.697545Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-10T12:09:26.697593Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.697661Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-10T12:09:26.697853Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1196:2985], Recipient [2:1061:2879]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2026-01-10T12:09:26.697885Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2026-01-10T12:09:26.697915Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2026-01-10T12:09:26.697982Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:09:26.698418Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037890 2026-01-10T12:09:26.698542Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1100:2910], Recipient [2:1100:2910]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.698576Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:26.698628Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:09:26.698662Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:26.698699Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2026-01-10T12:09:26.698730Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:09:26.698764Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2026-01-10T12:09:26.698805Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-10T12:09:26.698836Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2026-01-10T12:09:26.698865Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2026-01-10T12:09:26.698896Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-01-10T12:09:26.698932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2026-01-10T12:09:26.698962Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2026-01-10T12:09:26.698994Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:09:26.699023Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:09:26.699077Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-10T12:09:26.699120Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:09:26.699147Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2026-01-10T12:09:26.699176Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:26.699205Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-10T12:09:26.699234Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-10T12:09:26.699262Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:09:26.699322Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:09:26.699358Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037890 on unit 
FinishPropose 2026-01-10T12:09:26.699400Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:09:26.699467Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:09:26.699784Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1100:2910], Recipient [2:1061:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 347 } } CommitVersion { Step: 0 TxId: 281474976715662 } 2026-01-10T12:09:26.699825Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2026-01-10T12:09:26.699896Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014225s execute time: 0.313012s total time: 0.327237s 2026-01-10T12:09:26.700306Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-10T12:09:26.700544Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:1098:2908]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-10T12:09:26.700862Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2879], Recipient [2:1100:2910]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:21.045374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:21.177752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:21.189207Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:21.189677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:21.189889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:21.792768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.971705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:21.971856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:22.018023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:22.162692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:23.268943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:23.269751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:23.269831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:23.269872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:23.270561Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:23.392669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:24.265168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:28.985593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:29.000447Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-10T12:08:29.006281Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:29.057038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:29.057193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.102148Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:08:29.108579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.374851Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.374953Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.376678Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.377349Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.381195Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.382350Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.382496Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.382706Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.382942Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.383033Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.383167Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.399281Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.739698Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.808217Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:29.808354Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:29.888833Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:29.890303Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:29.890526Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:29.891875Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:29.891954Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:29.892045Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:29.892224Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:29.892306Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:29.893029Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:29.894518Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:1991:2455] 2026-01-10T12:08:29.901357Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-10T12:08:29.907992Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Describe result: PathErrorUnknown 2026-01-10T12:08:29.908058Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Creating table 2026-01-10T12:08:29.908171Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-10T12:08:29.936021Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:29.936180Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2226:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:29.982860Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2269:2636] 2026-01-10T12:08:29.983605Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2269:2636], schemeshard id = 72075186224037897 2026-01-10T12:08:29.985742Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2273:2637], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:30.031247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:30.081498Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:30.081660Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:30.096963Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:30.218199Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-10T12:08:30.468155Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:31.015359Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:31.176118Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:31.176218Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2198:2580] Owner: [3:2197:2579]. Column diff is empty, finishing 2026-01-10T12:08:32.028611Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWake ... e 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5938:3384], StatRequests.size() = 1 2026-01-10T12:09:14.586682Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:09:14.586782Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:14.586837Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:14.586879Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:09:14.614525Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5987:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:14.615247Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:09:14.615297Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5987:3397], StatRequests.size() = 1 2026-01-10T12:09:15.684606Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:16.131814Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6035:3409]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:16.140647Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:09:16.140770Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6035:3409], StatRequests.size() = 1 2026-01-10T12:09:17.820753Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:09:17.821013Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 10 2026-01-10T12:09:17.821210Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 
2026-01-10T12:09:17.821433Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:09:17.845274Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:17.845366Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:17.845702Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:09:17.862112Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:17.980935Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6079:3421]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:17.981387Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:09:17.981462Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6079:3421], StatRequests.size() = 1 2026-01-10T12:09:19.651860Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6125:3433]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:19.655490Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:09:19.655563Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6125:3433], StatRequests.size() = 1 2026-01-10T12:09:20.785118Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-10T12:09:20.785215Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-10T12:09:20.785536Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:09:20.803723Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-10T12:09:21.129156Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:21.567772Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6166:3443]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:21.568247Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:09:21.568298Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6166:3443], StatRequests.size() = 1 2026-01-10T12:09:23.068916Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:09:23.069400Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 11 2026-01-10T12:09:23.069857Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 
11 2026-01-10T12:09:23.069943Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:09:23.108960Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:23.109032Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:23.109339Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:09:23.145802Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:23.236145Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6211:3455]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:23.236521Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:09:23.236571Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6211:3455], StatRequests.size() = 1 2026-01-10T12:09:23.237108Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6213:3391]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:23.241241Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:09:23.241348Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6223:3395] 2026-01-10T12:09:23.241413Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6223:3395] 2026-01-10T12:09:23.242592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6223:3395], server id = [2:6229:3396], tablet id = 72075186224038895, status = OK 2026-01-10T12:09:23.242669Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6229:3396] 2026-01-10T12:09:23.242895Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6229:3396], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:09:23.242988Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:09:23.243308Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:09:23.243397Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6213:3391], StatRequests.size() = 1 2026-01-10T12:09:23.243468Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:09:23.385670Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224038895] EvFastPropagateCheck 2026-01-10T12:09:23.385798Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:09:23.512959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6223:3395], schemeshard count = 1 2026-01-10T12:09:25.579392Z node 2 
:STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:09:25.579659Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-10T12:09:25.580510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 1 2026-01-10T12:09:25.626715Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-10T12:09:25.626810Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-10T12:09:25.627143Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:09:25.641841Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-10T12:09:25.837651Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:27.541012Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:09:27.541612Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 12 2026-01-10T12:09:27.542167Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:09:27.542420Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-10T12:09:27.585131Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:27.585222Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:27.585599Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:09:27.602481Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:28.079795Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224038895] EvPropagateTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2026-01-10T12:09:15.796161Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1768046955796129 2026-01-10T12:09:16.590939Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703855822153674:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.591032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:16.685098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
2026-01-10T12:09:16.712370Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:16.875407Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:17.187066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.211107Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.211310Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.400290Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.504570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.709244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.709339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.728389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.728510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.796908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.845544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.853327Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.904837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:17.907624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.917925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.982393Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:17.997268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:09:18.014260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:18.599925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002280/r3tmp/yandexPfEFNH.tmp 2026-01-10T12:09:18.599952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002280/r3tmp/yandexPfEFNH.tmp 2026-01-10T12:09:18.600161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002280/r3tmp/yandexPfEFNH.tmp 2026-01-10T12:09:18.600271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:18.664335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:18.785860Z INFO: TTestServer started on Port 17691 GrpcPort 19132 PQClient connected to localhost:19132 2026-01-10T12:09:19.332633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:21.592222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703855822153674:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:21.592321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:22.802568Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703883855572311:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.802700Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703883855572322:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.802792Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.807752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703883855572336:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.807877Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.809331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703883855572344:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.809400Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.823264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:22.878812Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703883855572325:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:23.196387Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703883855572357:2145] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:23.223261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:23.240184Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703881591958648:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:23.240701Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjRhMGQzNS1lYWRmMTE5Ny1kZWU1NTAxOC1iODhhZGRjNg==, ActorId: [1:7593703881591958578:2331], ActorState: ExecuteState, LegacyTraceId: 01kekwza54b44hjazhp9v32h9e, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:23.244714Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ ... tion_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:09:30.465560Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2026-01-10T12:09:30.465799Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-10T12:09:30.466515Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:09:30.466683Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:09:30.466715Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 
2026-01-10T12:09:30.466743Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2026-01-10T12:09:30.467064Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2026-01-10T12:09:30.467525Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: try to update token 2026-01-10T12:09:30.467569Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2026-01-10T12:09:30.468065Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0 grpc read done: success: 1 data: write_request[data omitted] 2026-01-10T12:09:30.468279Z :INFO: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2026-01-10T12:09:30.468320Z :INFO: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session will now close 2026-01-10T12:09:30.468316Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2026-01-10T12:09:30.468381Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: aborting 2026-01-10T12:09:30.468659Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:09:30.468695Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:09:30.468752Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 4 requestId: cookie: 3 2026-01-10T12:09:30.468990Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-10T12:09:30.469173Z :WARNING: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2026-01-10T12:09:30.469241Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:09:30.469304Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session is aborting and will not restart 2026-01-10T12:09:30.469359Z :DEBUG: [/Root] TraceId [] SessionId [src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0] MessageGroupId [src_id] Write session: destroy 2026-01-10T12:09:30.469366Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:09:30.469399Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:09:30.469462Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic 
partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 5 size 116 offset: -1 2026-01-10T12:09:30.469501Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:09:30.469431Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0 grpc read done: success: 0 data: 2026-01-10T12:09:30.469537Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:30.469465Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0 grpc read failed 2026-01-10T12:09:30.469556Z node 2 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:09:30.469585Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:09:30.469595Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:30.469612Z node 2 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:09:30.469713Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2026-01-10T12:09:30.469623Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0 grpc closed 2026-01-10T12:09:30.469649Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|f1a2ea2-7e4696d6-8aeb4118-67aee129_0 is DEAD 2026-01-10T12:09:30.470937Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:09:30.497038Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 190 count 1 nextOffset 3 batches 1 2026-01-10T12:09:30.497094Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:09:30.497103Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:09:30.497116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:30.497372Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 178 WTime 1768046970497 2026-01-10T12:09:30.497536Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7593703915951698049:2470] destroyed 2026-01-10T12:09:30.497583Z node 2 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037892][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:09:30.497600Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:09:30.497664Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 178 2026-01-10T12:09:30.497693Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:09:30.497718Z node 2 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037892][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:09:30.500251Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 178 actorID [2:7593703913920343854:2389] 2026-01-10T12:09:30.500316Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:09:30.500364Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 123 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:09:30.500394Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 178 2026-01-10T12:09:30.500398Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-10T12:09:30.500428Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2026-01-10T12:09:30.500550Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:30.500570Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:30.500580Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:30.500593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:30.500602Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:30.500627Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:09:30.500659Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 5 requestId: cookie: 3 2026-01-10T12:09:30.597712Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:30.597742Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:30.597755Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:30.597771Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
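Annotation: the write-session trace above ends with the client logging "could not confirm all writes in time or session aborted, perform hard shutdown" — on close it waits for outstanding acknowledgements and aborts hard when they do not arrive before the deadline. The snippet below is a minimal, hypothetical C++ sketch of that close-with-deadline pattern; it is not the YDB SDK API, and the class and method names (PendingAckTracker, OnMessageSent, OnAck, WaitForAcks) are invented for illustration only.

```cpp
// Illustrative sketch (not the YDB SDK): track unacknowledged messages and
// fall back to a hard shutdown when acks do not arrive before the close
// deadline, mirroring the "could not confirm all writes in time" path above.
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <mutex>

class PendingAckTracker {
public:
    void OnMessageSent() {
        std::lock_guard<std::mutex> lock(Mutex_);
        ++Pending_;
    }

    void OnAck() {
        std::lock_guard<std::mutex> lock(Mutex_);
        if (Pending_ > 0 && --Pending_ == 0) {
            AllAcked_.notify_all();
        }
    }

    // Returns true if every in-flight message was confirmed before the
    // deadline; false means the caller should abort the session hard.
    bool WaitForAcks(std::chrono::milliseconds timeout) {
        std::unique_lock<std::mutex> lock(Mutex_);
        return AllAcked_.wait_for(lock, timeout, [this] { return Pending_ == 0; });
    }

private:
    std::mutex Mutex_;
    std::condition_variable AllAcked_;
    std::size_t Pending_ = 0;
};
```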
2026-01-10T12:09:30.597782Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:08:21.805903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:08:21.806010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:21.806046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:08:21.806105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:08:21.806144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:08:21.806175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:08:21.806231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:08:21.806290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:08:21.807070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:08:21.807415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:08:21.903710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:08:21.903792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.925457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:08:21.925586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:08:21.925777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:08:21.940739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:08:21.940990Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:08:21.941811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:08:21.942059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:08:21.944778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:21.944960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:08:21.946178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:08:21.946264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:08:21.946338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:08:21.946383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:08:21.946425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:08:21.946711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:08:22.137970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.139960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.140068Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.140198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.140307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:08:22.140398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
de 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:09:26.361644Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:09:26.361679Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-10T12:09:26.361713Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-10T12:09:26.363765Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:09:26.363875Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:26.363956Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:882:2841] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:26.365258Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:26.365517Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 308us result status StatusSuccess 2026-01-10T12:09:26.366183Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 2 2026-01-10T12:09:26.366963Z node 7 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-10T12:09:26.369840Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:09:26.369909Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 102 2026-01-10T12:09:26.369989Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-10T12:09:26.370071Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:09:26.370235Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2026-01-10T12:09:26.370339Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:09:26.370390Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2026-01-10T12:09:26.370469Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-10T12:09:26.370604Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:09:26.373751Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-10T12:09:26.373930Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-10T12:09:26.374128Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, at schemeshard: 
72057594046678944 2026-01-10T12:09:26.376815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:09:26.377124Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2026-01-10T12:09:26.377340Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-10T12:09:26.377452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-10T12:09:26.377530Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:09:26.377576Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-10T12:09:26.377670Z node 7 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-10T12:09:26.377832Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-10T12:09:26.380795Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-10T12:09:26.380967Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:09:26.381084Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:09:26.381165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:09:26.381236Z node 7 :EXPORT DEBUG: 
schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:09:26.381283Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-10T12:09:26.381334Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-10T12:09:26.384709Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2026-01-10T12:09:26.385008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:26.385078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:26.385621Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:26.385744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:26.385810Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:1109:3063] TestWaitNotification: OK eventTxId 102 |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:16.823091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:17.024814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:17.033519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:17.034068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.034193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:17.484409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.484608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.492018Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046952871318 != 1768046952871321 2026-01-10T12:09:17.532916Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:17.590348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.726211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:18.123562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:18.147665Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:18.270408Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:18.270554Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:18.270725Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:09:18.484387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:18.484523Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:18.485211Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:18.485326Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:18.485744Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:18.485903Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:18.486072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:18.488454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:18.488990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:18.489843Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:18.489949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:18.547182Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:18.548332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:18.548642Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:09:18.548908Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:18.657543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:18.658457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:18.658598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:18.660988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:18.661072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:18.661169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:18.661578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:18.661723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:18.661829Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:09:18.676650Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:18.777961Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:18.778188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:18.778341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:09:18.778393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:18.778433Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:18.778475Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:18.778724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:18.778775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:18.779338Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:18.779464Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:18.779598Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:18.779662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:18.779708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:18.779747Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:18.779785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:18.791261Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:18.791410Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:18.791602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:18.791658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:18.791707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:09:18.791830Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
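Annotation: the TX_DATASHARD traces around this point step a proposed transaction through named execution units (CheckSchemeTx, WaitForStreamClearance, ReadTableScan, FinishPropose, CompletedOperations), each reporting a status such as Executed, Continue, or DelayComplete. Below is a minimal, hypothetical C++ sketch of such a unit pipeline; it is not the NKikimr::NDataShard implementation, and all type and function names are invented for illustration.

```cpp
// Sketch only: a transaction advances through an ordered list of execution
// units; a unit may finish immediately (Executed), park the operation until
// an external event (Continue), or defer its completion work (DelayComplete),
// matching the statuses seen in the datashard pipeline traces.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EExecutionStatus { Executed, Continue, DelayComplete };

struct TExecutionUnit {
    std::string Name;
    std::function<EExecutionStatus()> Execute;  // hypothetical unit body
};

// Runs units in order; stops early when a unit asks to be resumed later.
void AdvanceExecutionPlan(const std::vector<TExecutionUnit>& units) {
    for (const auto& unit : units) {
        const EExecutionStatus status = unit.Execute();
        std::cout << "unit " << unit.Name << " -> status "
                  << static_cast<int>(status) << "\n";
        if (status == EExecutionStatus::Continue) {
            return;  // parked until e.g. stream clearance or scan completion
        }
        // Executed and DelayComplete both advance to the next unit; deferred
        // work would be replayed in a separate Complete() phase.
    }
}
```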
2026-01-10T12:09:18.791886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:18.792030Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:18.792327Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:18.792389Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:18.792526Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... g event TEvTxProcessing::TEvStreamClearancePending 2026-01-10T12:09:25.989310Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2026-01-10T12:09:25.989345Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-10T12:09:25.989516Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:25.989553Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:25.989613Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:25.989656Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:25.989711Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2026-01-10T12:09:25.989761Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2026-01-10T12:09:25.989816Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2026-01-10T12:09:25.989857Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-10T12:09:25.989892Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-10T12:09:25.989925Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2026-01-10T12:09:25.989959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-10T12:09:25.990205Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2026-01-10T12:09:25.990249Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:25.990283Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 
72075186224037888 2026-01-10T12:09:25.990314Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:25.990345Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:25.990420Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:25.991029Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:975:2831], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:25.991098Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-10T12:09:25.991201Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2026-01-10T12:09:25.991239Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-10T12:09:25.991579Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:958:2816], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-10T12:09:25.991638Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:959:2816] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-10T12:09:25.991685Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:959:2816] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-10T12:09:25.991753Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-10T12:09:25.991874Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2026-01-10T12:09:25.991934Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-10T12:09:25.992413Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2026-01-10T12:09:25.992461Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-10T12:09:25.992525Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:959:2816] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-10T12:09:25.992613Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:09:25.992661Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715659, at: 72075186224037888 2026-01-10T12:09:25.992782Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:25.992848Z node 2 :TX_DATASHARD TRACE: 
datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:25.992919Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:25.992963Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:09:25.993020Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2026-01-10T12:09:25.993064Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-10T12:09:25.993110Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2026-01-10T12:09:25.993172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-10T12:09:25.993218Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2026-01-10T12:09:25.993255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:09:25.993288Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-10T12:09:25.993324Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2026-01-10T12:09:25.993361Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:09:25.993396Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:09:25.993434Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:09:25.993482Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-10T12:09:25.993508Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:09:25.993535Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2026-01-10T12:09:25.993573Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:25.993612Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-10T12:09:25.993650Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:25.993687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:25.993754Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:09:25.993796Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-10T12:09:25.993843Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2026-01-10T12:09:25.993895Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2026-01-10T12:09:25.994146Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:25.994540Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:886:2765], Recipient [2:959:2816]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 523 } } CommitVersion { Step: 0 TxId: 281474976715659 } 2026-01-10T12:09:25.994590Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1922: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2026-01-10T12:09:25.994707Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2920: [ReadTable [2:959:2816] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2026-01-10T12:09:25.995173Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 2500 TxId: 281474976715658 |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2026-01-10T12:08:32.774218Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703665988493313:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:32.780383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:33.378360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:33.561438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:33.561565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:33.568833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-10T12:08:33.608234Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:33.637121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:33.640348Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703665988493072:2081] 1768046912607310 != 1768046912607313 2026-01-10T12:08:33.686862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:33.908090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:33.908115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:33.908122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:33.908216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:34.404722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:34.411996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:08:37.773846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703665988493313:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:37.773921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:38.529045Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:38.529088Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:116: [WorkloadService] [Service] Resource pools was disabled 2026-01-10T12:08:38.536144Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU= temp_dir_name# 7ab65ec9-4217-0f9c-6f03-089bb166a05c trace_id# 2026-01-10T12:08:38.537040Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:38.555829Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# 
ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI= temp_dir_name# c3b87f14-4c69-1528-f854-898682c4ff75 trace_id# 2026-01-10T12:08:38.556191Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:38.556694Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ReadyState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7593703691758297720:2500] database# Root database_id# /Root pool_id# trace_id# 2026-01-10T12:08:38.556862Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Sending CompileQuery request trace_id# 2026-01-10T12:08:39.224682Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136313945226712 trace_id# 2026-01-10T12:08:39.224762Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:08:39.225117Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Created new KQP executer executer_id# [1:7593703696053265027:2323] is_rollback# false trace_id# 2026-01-10T12:08:39.292380Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Forwarded TEvStreamData to [1:7593703691758297720:2500] trace_id# 2026-01-10T12:08:39.309991Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:08:39.310238Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, TxInfo status# Committed kind# Pure total_duration# 85.797 server_duration# 85.655 queries_count# 2 trace_id# 2026-01-10T12:08:39.310323Z node 1 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:08:39.310575Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:08:39.310603Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, EndCleanup is_final# true trace_id# 2026-01-10T12:08:39.310662Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: ExecuteState, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Sent query response back to proxy proxy_request_id# 3 proxy_id# [1:7593703665988493334:2265] trace_id# 2026-01-10T12:08:39.310708Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: unknown state, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:08:39.316632Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=ODg4ODM4MjgtMTAzYzIxZWItZjY2YWZkODctOTg3Njg5MWI=, ActorId: [1:7593703691758297721:2323], ActorState: unknown state, LegacyTraceId: 01kekwxywwdbg0yd6xx62c0pan, Session actor destroyed trace_id# 2026-01-10T12:08:39.329786Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:08:39.329847Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:08:39.329872Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:08:39.329924Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:08:39.330001Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: 
ydb://session/3?node_id=1&id=ZWM2MDQ3NTEtMTc5MDUyMDgtOThkNzU1ZmMtNmE0MTI4MTU=, ActorId: [1:7593703691758297719:2322], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:08:41.863106Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to ... -10T12:09:30.553555Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NTE0NjExMzAtYjBjNjNhZGEtYWZkZTNiNTItM2YxZjhjMDQ=, ActorId: [8:7593703913732161211:2333], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:09:30.553618Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NTE0NjExMzAtYjBjNjNhZGEtYWZkZTNiNTItM2YxZjhjMDQ=, ActorId: [8:7593703913732161211:2333], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:30.553654Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NTE0NjExMzAtYjBjNjNhZGEtYWZkZTNiNTItM2YxZjhjMDQ=, ActorId: [8:7593703913732161211:2333], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:09:30.553687Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NTE0NjExMzAtYjBjNjNhZGEtYWZkZTNiNTItM2YxZjhjMDQ=, ActorId: [8:7593703913732161211:2333], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:30.553775Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NTE0NjExMzAtYjBjNjNhZGEtYWZkZTNiNTItM2YxZjhjMDQ=, ActorId: [8:7593703913732161211:2333], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:09:30.555321Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:292: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128555:2927], ActorId: [8:7593703918027128556:2928], Starting query actor #1 [8:7593703918027128557:2929] 2026-01-10T12:09:30.555368Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:135: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], Bootstrap. 
Database: /Root, IsSystemUser: 0, run create session 2026-01-10T12:09:30.558717Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ= temp_dir_name# 43657cb4-435f-cc19-68f5-5eabf67eda03 trace_id# 2026-01-10T12:09:30.558861Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:09:30.559268Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:156: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], Successfully created session: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, run query 2026-01-10T12:09:30.559302Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], RunDataQuery with SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, TxId: , text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2026-01-10T12:09:30.559645Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ReadyState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Received request proxy_request_id# 6 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpc_actor# [8:7593703918027128560:2346] database# /Root database_id# /Root pool_id# trace_id# 2026-01-10T12:09:30.560161Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136313943719896 trace_id# 2026-01-10T12:09:30.560219Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:09:30.560408Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Created new KQP executer executer_id# [8:7593703918027128563:2345] is_rollback# false trace_id# 2026-01-10T12:09:30.565507Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, TEvTxResponse 
current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:09:30.565679Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, TxInfo status# Committed kind# ReadOnly total_duration# 5.63 server_duration# 5.552 queries_count# 2 trace_id# 2026-01-10T12:09:30.565798Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:09:30.565894Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:30.565979Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, EndCleanup is_final# false trace_id# 2026-01-10T12:09:30.566073Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ExecuteState, LegacyTraceId: 01kekwzhnz881whztds3jnsza2, Sent query response back to proxy proxy_request_id# 6 proxy_id# [8:7593703892257323664:2268] trace_id# 2026-01-10T12:09:30.566390Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, TxId: 2026-01-10T12:09:30.566497Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, TxId: 2026-01-10T12:09:30.566546Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128556:2928], ActorId: [8:7593703918027128557:2929], Delete session: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ= 2026-01-10T12:09:30.566563Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7593703918027128555:2927], ActorId: [8:7593703918027128556:2928], Got response [8:7593703918027128557:2929] SUCCESS 2026-01-10T12:09:30.566716Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:09:30.566776Z node 8 :KQP_SESSION INFO: 
{KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:30.566813Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:09:30.566839Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:30.566904Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZDhkMGI0MDgtMWIzN2U5ODQtOWViM2VhMDEtNmMwNmRlMmQ=, ActorId: [8:7593703918027128559:2345], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:09:30.575198Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=OTdjNzhhMGYtYTUzYTg2MzItOTkzYjVmNDItM2ZlZGRlMjk=, ActorId: [8:7593703913732161126:2331], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:09:30.575272Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=OTdjNzhhMGYtYTUzYTg2MzItOTkzYjVmNDItM2ZlZGRlMjk=, ActorId: [8:7593703913732161126:2331], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:30.575313Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=OTdjNzhhMGYtYTUzYTg2MzItOTkzYjVmNDItM2ZlZGRlMjk=, ActorId: [8:7593703913732161126:2331], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:09:30.575353Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=OTdjNzhhMGYtYTUzYTg2MzItOTkzYjVmNDItM2ZlZGRlMjk=, ActorId: [8:7593703913732161126:2331], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:30.575460Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=OTdjNzhhMGYtYTUzYTg2MzItOTkzYjVmNDItM2ZlZGRlMjk=, ActorId: [8:7593703913732161126:2331], ActorState: unknown state, Session actor destroyed trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] Test command err: 2026-01-10T12:09:32.296308Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-10T12:09:32.296472Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-10T12:09:32.296524Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 1 2026-01-10T12:09:32.299787Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 
2026-01-10T12:09:32.300772Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:32.300881Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-10T12:09:33.296212Z node 6 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 6 2026-01-10T12:09:33.296321Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-10T12:09:33.296371Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-10T12:09:33.296425Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-10T12:09:33.296511Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 4 2026-01-10T12:09:33.296570Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.296765Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:33.296792Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-10T12:09:33.523833Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-10T12:09:33.524734Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-10T12:09:33.524793Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-10T12:09:33.524871Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 4 2026-01-10T12:09:33.525473Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.526016Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-10T12:09:33.526058Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-10T12:09:33.526119Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-10T12:09:33.526204Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.526487Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:280:2143] 2026-01-10T12:09:33.526644Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-10T12:09:33.526679Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 11436890671405667697 2026-01-10T12:09:33.527022Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:280:2143] 
2026-01-10T12:09:33.527599Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 5 state: UNKNOWN 2026-01-10T12:09:33.527634Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-10T12:09:33.528367Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-10T12:09:33.528401Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 5380068269072606170 2026-01-10T12:09:33.529845Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 4 state: OWNER 2026-01-10T12:09:33.529881Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 72075186232723361, type: Dummy, become watch on node 4 (owner) 2026-01-10T12:09:33.665682Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-10T12:09:33.665785Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-10T12:09:33.665844Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-10T12:09:33.667023Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-10T12:09:33.667099Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-10T12:09:33.667141Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-10T12:09:33.667251Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-10T12:09:33.667340Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.667418Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.668480Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:334:2166] 2026-01-10T12:09:33.668925Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-10T12:09:33.668980Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-10T12:09:33.988936Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-10T12:09:33.989080Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-10T12:09:33.989130Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-10T12:09:33.990683Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 7 2026-01-10T12:09:33.990735Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-10T12:09:33.990837Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 
72075186232723361 bootstrapper on node 7 2026-01-10T12:09:33.990936Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:33.991626Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [5:378:2102] 2026-01-10T12:09:33.992004Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-10T12:09:33.992049Z node 7 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |86.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:05:27.625142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:27.878159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:27.951219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:27.951979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:27.960483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:28.957230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:29.139410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:29.139571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:29.190362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:29.371362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:05:30.855508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:05:30.855576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:05:30.855630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:05:30.855859Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... blocking NKikimr::NSchemeShard::TEvSchemeShard::TEvDescribeScheme from MONITORING_REQUEST to FLAT_SCHEMESHARD_ACTOR cookie 1 2026-01-10T12:05:30.924986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:05:41.421954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:05:41.422040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-10T12:05:54.121552Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:54.121735Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:05:54.135530Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:54.138317Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:05:54.139590Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:849:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:54.139942Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:54.140218Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:54.141936Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:844:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:05:54.142143Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:05:54.142263Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:05:54.677421Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:05:54.876743Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.876883Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.878198Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:05:54.878319Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:05:54.941769Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:05:54.943035Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:54.943433Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:05:55.028334Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:05:55.043113Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:06:00.480696Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:06:00.486191Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 3: Status: 2 2026-01-10T12:06:00.486325Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(3)::Execute 2026-01-10T12:06:00.486379Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:06:00.486480Z node 3 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(3)::Complete 2026-01-10T12:06:00.490312Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(3)::Execute 2026-01-10T12:06:00.490449Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:06:00.490496Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(3) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2026-01-10T12:06:00.490550Z node 3 :HIVE DEBUG: hive_impl.cpp:2891: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 3) 2026-01-10T12:06:00.490600Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [3:1226:2634] 2026-01-10T12:06:00.490655Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(3): waiting 3600.000000s 2026-01-10T12:06:00.491537Z node 3 :HIVE TRACE: hive_impl.cpp:150: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([3:872:2417]) [3:1226:2634] 2026-01-10T12:06:00.492495Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:06:00.492552Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:06:00.492593Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:06:00.493542Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:00.494429Z node 3 :HIVE TRACE: hive_impl.cpp:142: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1705:2996]) [3:1943:3000] 2026-01-10T12:06:00.495186Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 4: Status: 2 2026-01-10T12:06:00.495281Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(4)::Execute 2026-01-10T12:06:00.495325Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-10T12:06:00.495489Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(4)::Execute 2026-01-10T12:06:00.495578Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:06:00.495616Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(4) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2026-01-10T12:06:00.495659Z node 3 :HIVE DEBUG: hive_impl.cpp:2891: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(2, 4) 2026-01-10T12:06:00.495699Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server 
[3:1250:2657] 2026-01-10T12:06:00.495740Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(4): waiting 3600.000000s 2026-01-10T12:06:00.496028Z node 3 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2026-01-10T12:06:00.507695Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2026-01-10T12:06:00.507818Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-10T12:06:00.508150Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2026-01-10T12:06:00.508250Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-10T12:06:00.508473Z node 3 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888 ... known -> Disconnected 2026-01-10T12:09:28.115864Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.116398Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.116496Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.116806Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.116889Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.117196Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.117271Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.117545Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.117620Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.118143Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.118220Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.118566Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.118658Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.118983Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:28.119068Z node 29 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:28.195889Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2026-01-10T12:09:28.196781Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 31 Cookie 31 2026-01-10T12:09:28.197184Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 32 Cookie 32 2026-01-10T12:09:28.197414Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 33 Cookie 33 2026-01-10T12:09:28.197569Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2026-01-10T12:09:28.197723Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2026-01-10T12:09:28.197887Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2026-01-10T12:09:28.198056Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2026-01-10T12:09:28.198438Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.199456Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.199823Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.200138Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.200417Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.200711Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.200964Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.201222Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.201459Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:28.262485Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.325323Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.342584Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.395276Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.435927Z node 33 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.469249Z node 29 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.486748Z node 30 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.507767Z node 32 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:28.618922Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:29.573682Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:29.573726Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:29.573748Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:29.574325Z node 29 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:29.647510Z node 32 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.647662Z node 33 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.647744Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.647825Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.647901Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.648364Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.652549Z node 30 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.652679Z node 29 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:29.652759Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: 
EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-35" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 35 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-36" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 36 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-37" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 37 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-35" reason: "YELLOW-7932-1231c6b1-36" reason: "YELLOW-7932-1231c6b1-37" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-29" type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-29" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-29" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 29 host: "::1" port: 12001 } >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling >> TTabletPipeTest::TestConsumerSidePipeReset |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TStreamingQueryTest::ParallelAlterStreamingQuery >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryWithProperties >> TStreamingQueryTest::CreateStreamingQueryOrReplace >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> 
TTabletPipeTest::TestConnectReject >> TStreamingQueryTest::CreateStreamingQuery >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] |86.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> TStreamingQueryTest::DropStreamingQueryTwice >> TStreamingQueryTest::ParallelCreateStreamingQuery >> TStreamingQueryTest::AlterStreamingQuery >> TStreamingQueryTest::ParallelCreateSameStreamingQuery >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore |86.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> TResourceBroker::TestResubmitTask >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestSendAfterReboot >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling [GOOD] >> ConfiguredTabletBootstrapperTest::NodeListChange |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> TResourceBroker::TestResubmitTask [GOOD] |86.0%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TResourceBroker::TestUpdateCookie >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> TPipeCacheTest::TestAutoConnect [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries >> KqpWorkloadService::TestStartQueryAfterCancel >> TStreamingQueryTest::CreateStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> TStreamingQueryTest::CreateStreamingQueryWithProperties [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace [GOOD] >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] >> TStreamingQueryTest::DropStreamingQuery >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists >> ResourcePoolsDdl::TestAlterResourcePool >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::AlterStreamingQuery [GOOD] >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] >> BootstrapperTest::KeepExistingTablet >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] |86.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] |86.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:37.790038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:37.790162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:37.790211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:37.790254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:37.790299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:37.790330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:37.790427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:37.790523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:37.791547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:37.791883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:37.889669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:37.889740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:37.906818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:37.906934Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:37.907147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:37.915891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:37.916156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:37.916955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:37.917189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:37.919430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:37.919596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:37.920766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:37.920829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:37.920905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:37.920955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:37.921065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:37.921376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.085388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.086509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.086668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.086755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.086857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.086926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.087752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-10T12:09:38.917763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.917784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.917861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-10T12:09:38.917903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.917918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.917959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.918182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.918323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.918620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.918725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:794:2784] 
2026-01-10T12:09:38.918825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:09:38.918882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.918907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.918979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-10T12:09:38.919013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-10T12:09:38.919390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-10T12:09:38.919618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-10T12:09:38.919705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.919913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.919926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.920023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber 
for txId 128: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.920045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.920160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.920180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.920258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.920276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:794:2784] 2026-01-10T12:09:38.920403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-10T12:09:38.920425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:794:2784] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2026-01-10T12:09:38.923559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:38.923798Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 225us result status StatusSuccess 2026-01-10T12:09:38.924158Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.1%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.176979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.177101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.177145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.177189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.177234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.177263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-10T12:09:38.177332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.177424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.178441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.178746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.279015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.279079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.295996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.296131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.296366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.310586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.310797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.311402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.311588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.313646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.313785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.314718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.314767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.314825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.314862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.314958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.315156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.465458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.466957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
escribe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.155808Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 262us result status StatusSuccess 2026-01-10T12:09:39.156250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.156755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.156949Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 148us result status StatusSuccess 2026-01-10T12:09:39.157244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2026-01-10T12:09:39.157564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:39.157622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:09:39.157727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:39.157751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:09:39.157801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:09:39.157820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:09:39.158495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:39.158639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:39.158680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:718:2708] 2026-01-10T12:09:39.158898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:39.158979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:09:39.159026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:39.159051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:718:2708] 2026-01-10T12:09:39.159176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:39.159219Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:718:2708] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-10T12:09:39.159749Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.159940Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 185us result status StatusSuccess 2026-01-10T12:09:39.160279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-10T12:09:39.163730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "NilNoviSubLuna" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:39.164000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 104:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "NilNoviSubLuna" } 2026-01-10T12:09:39.164102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 104:0, path# /MyRoot/NilNoviSubLuna 
2026-01-10T12:09:39.164247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:09:39.166702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:39.167046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: NilNoviSubLuna TestModificationResult got TxId: 104, wait until txId: 104 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] Test command err: 2026-01-10T12:09:37.591698Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-10T12:09:37.591867Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-10T12:09:37.597183Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:37.599533Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:37.599603Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-10T12:09:37.755507Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 1 2026-01-10T12:09:37.756859Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-10T12:09:37.756944Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 1 2026-01-10T12:09:37.757051Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-10T12:09:38.469989Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-10T12:09:38.470111Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-10T12:09:38.470227Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 3 2026-01-10T12:09:38.470309Z node 3 :BOOTSTRAPPER DEBUG: 
bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:38.470647Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:38.470691Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-10T12:09:38.659173Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 3 2026-01-10T12:09:38.659303Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-10T12:09:38.659351Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 3 2026-01-10T12:09:38.660591Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-10T12:09:38.660653Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-10T12:09:38.660750Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 4 2026-01-10T12:09:38.660854Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:38.661740Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: OK, leader: [3:221:2142] 2026-01-10T12:09:38.662192Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723360, type: Dummy, connect: ERROR 2026-01-10T12:09:38.662238Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot >> TStreamingQueryTest::DropStreamingQuery [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.000131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.000241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.000286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.000337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing 
config: using default configuration 2026-01-10T12:09:38.000383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.000418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.000506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.000599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.001664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.001972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.103230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.103314Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.119749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.119856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.120103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.129047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.129326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.130099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.130375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.132739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.132921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.134212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.134278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.134353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.134400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.134517Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.134820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.295250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.296923Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.297565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard: 72057594046678944 2026-01-10T12:09:39.308476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-10T12:09:39.308628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:39.309687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.309799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.309842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:39.309886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:09:39.311322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:09:39.312637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.312731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.312760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:39.312786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 2 2026-01-10T12:09:39.312819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:09:39.312883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-10T12:09:39.315332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-10T12:09:39.315493Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:09:39.316638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 2026-01-10T12:09:39.317211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:09:39.317447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:39.317571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.317633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 105:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000040, at schemeshard: 72057594046678944 2026-01-10T12:09:39.317767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:09:39.317934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:09:39.318002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 FAKE_COORDINATOR: Erasing txId 105 2026-01-10T12:09:39.320146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:39.320193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:39.320374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:09:39.320440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:39.320492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:861:2819], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-10T12:09:39.320538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:861:2819], at schemeshard: 72057594046678944, txId: 105, path id: 40 2026-01-10T12:09:39.320617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 
2026-01-10T12:09:39.320657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:09:39.320763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:09:39.320802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:09:39.320845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:09:39.320886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:09:39.320930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:09:39.320973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:09:39.321038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:09:39.321078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:09:39.321148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:09:39.321197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-10T12:09:39.321263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2026-01-10T12:09:39.321336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-10T12:09:39.322563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.322694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.322739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:39.322798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2026-01-10T12:09:39.322848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:09:39.323598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 105 2026-01-10T12:09:39.323668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:39.323692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:39.323723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:09:39.323758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:09:39.323848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:09:39.326724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:09:39.328311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.150660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.150739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.150768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.150810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.150840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.150863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.150911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.150994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.151823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.152044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.230992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.231044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.244516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.244637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.244854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.255096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.255350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.256095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.256380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.260969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.261162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.262851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.263344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.263492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.263541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.263650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.264260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.410834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:09:38.411893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.412990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.413072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
th "/MyRoot/DirA/MyStreamingQuery1" took 282us result status StatusSuccess 2026-01-10T12:09:39.450686Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.451501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.451717Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 218us result status StatusSuccess 2026-01-10T12:09:39.452020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.452769Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.452949Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 162us result status StatusSuccess 2026-01-10T12:09:39.453266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.453777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.453948Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 168us result status StatusSuccess 2026-01-10T12:09:39.454208Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.454625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:39.454792Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 185us result status StatusSuccess 2026-01-10T12:09:39.455073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> BasicUsage::CloseWriteSessionImmediately [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:105:2138] ... blocking block result NO_GROUP for [1:106:2138] ... blocking block result NO_GROUP for [1:107:2138] ... 
blocking block result NO_GROUP for [1:108:2138] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.024774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.024866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.024967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.025009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.025048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.025080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.025161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.025261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.026219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.026546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.105019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.105078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.120224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.120326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.120508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.129917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.130189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.130881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.131096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.133516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.133673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.134810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.134871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.134939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.134987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.135112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.135382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.289272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:09:38.290599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.290995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.291052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.291144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.291241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.291335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.291429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :09:39.895817Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:09:39.897564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:39.897636Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:39.897686Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:09:39.899119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:39.899169Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:39.899222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:39.899285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:09:39.899421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:39.900719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:09:39.900866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:09:39.901177Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:39.901313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.901375Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:39.901662Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:09:39.901729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:39.901894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:39.901975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:09:39.903993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:39.904055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:39.904327Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:39.904389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:09:39.904737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:39.904797Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:09:39.904919Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:39.904965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:39.905014Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:39.905058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:39.905106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:09:39.905152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:39.905221Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:09:39.905261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:09:39.905335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:39.905379Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:09:39.905423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:09:39.905960Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:39.906067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:39.906107Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:09:39.906143Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:09:39.906190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:39.906267Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:09:39.908700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:09:39.909146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:09:39.910136Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-01-10T12:09:39.911170Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-01-10T12:09:39.914341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:39.914542Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-10T12:09:39.914672Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-10T12:09:39.915724Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:09:39.918256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:39.918479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER STREAMING QUERY, path: MyStreamingQuery 2026-01-10T12:09:39.919040Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:39.919266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:39.919314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:39.919669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:39.919766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:39.919821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2684] TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... 
waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for connect11 >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:37.980007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:37.980166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:37.980225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:37.980278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:37.980325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:37.980364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:37.980458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:37.980575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:37.981740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:37.982104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.077562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.077634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.094924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.095070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2026-01-10T12:09:38.095313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.105011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.105278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.106054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.106294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.108768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.108936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.110137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.110198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.110274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.110317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.110443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.110684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.257126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.258950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.259045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.259134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.259255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.259355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-10T12:09:40.052738Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:40.052863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:40.052918Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:09:40.053008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:40.053116Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:09:40.053301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.053364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:40.054494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:09:40.055098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:09:40.056692Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:40.056731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:40.056881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:09:40.057012Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:40.057041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:09:40.057072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:09:40.057341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.057391Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:09:40.057523Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:09:40.057563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:09:40.057610Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:09:40.057645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:09:40.057686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:09:40.057735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:09:40.057775Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:09:40.057812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:09:40.057888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:40.057965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:09:40.058006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:09:40.058039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:09:40.058462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:40.058558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:40.058592Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:09:40.058642Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:09:40.058689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:40.059054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:40.059103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:09:40.059202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:40.059508Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:40.059583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:40.059614Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:09:40.059651Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:09:40.059681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.059761Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:09:40.063431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:09:40.063563Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:09:40.063688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:40.063887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:40.063940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:40.064382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:40.064487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.064533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:738:2727] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:40.065050Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:40.065249Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 232us result status StatusPathDoesNotExist 2026-01-10T12:09:40.065452Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyStreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.065619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.065692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.065726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.065753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.065780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.065804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.065857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.065934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-10T12:09:38.066641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.066862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.155338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.155395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.171499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.171627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.171819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.181769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.182039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.182832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.183075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.185568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.185748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.186991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.187055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.187131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.187182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.187323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.187598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.339395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.340976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.341651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.137988Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:40.138047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:09:40.138108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:40.138722Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.138806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.138859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:40.138894Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:09:40.138927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:40.139013Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:09:40.141861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:40.142943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:40.143177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:40.143230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:40.143610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:40.143702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.143744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:710:2699] TestWaitNotification: OK eventTxId 101 2026-01-10T12:09:40.145211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:40.145429Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 262us result status StatusSuccess 2026-01-10T12:09:40.145900Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:09:40.149426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:40.149778Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 102:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } 2026-01-10T12:09:40.149907Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 102:0, path# /MyRoot/MyStreamingQuery 2026-01-10T12:09:40.150055Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:09:40.153007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:40.153244Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: MyStreamingQuery TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:40.153552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:40.153594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:40.153877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:40.153948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.153976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:718:2707] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:40.154286Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:40.154423Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 169us result status StatusSuccess 2026-01-10T12:09:40.154716Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.000599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.000696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.000749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.000796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.000860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.000899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.000960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.001039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2026-01-10T12:09:38.002036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.002339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.099378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.099453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.118488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.125399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.125602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.134805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.135010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.135875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.136336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.140489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.140688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.141908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.141970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.142076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.142122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.142252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.142517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.299346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.300985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.301510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
emeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:40.006080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:40.006229Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:09:40.006385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.006459Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:09:40.006504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:09:40.006549Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:09:40.008301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.008367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:40.008411Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:09:40.010010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.010049Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.010120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:40.010162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.010286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:40.011636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:09:40.011766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:09:40.012060Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 
5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:40.012182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:40.012234Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:40.012497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:09:40.012554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:40.012712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.012780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:09:40.014538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:40.014584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:40.014747Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:40.014793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:09:40.015106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.015169Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:09:40.015286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:40.015324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.015364Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:40.015400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.015468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:09:40.015513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.015553Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:09:40.015589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:09:40.015660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:40.015700Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:09:40.015737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:09:40.016352Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:40.016456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:40.016500Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:09:40.016574Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:09:40.016618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.016705Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:09:40.019378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:09:40.019744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:09:40.020331Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-01-10T12:09:40.021324Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-01-10T12:09:40.023722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:40.023929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2026-01-10T12:09:40.024008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] 
TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2026-01-10T12:09:40.024308Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-10T12:09:40.025324Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:09:40.027811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:40.028019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE STREAMING QUERY, path: 2026-01-10T12:09:40.028655Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.101842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.101950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.101999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.102032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.102067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.102088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.102184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.102276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.103263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.103514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.176505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.176564Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.194554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.194709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.194924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.214713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.214986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.215860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.216138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.218818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.219000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.220272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.220324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.220408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.220455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.220564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.220818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.344410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.345529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.345692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.345799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.345889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.345988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.346866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:09:40.206202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:09:40.206467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.206510Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:09:40.206614Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:40.206644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:40.206675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:40.206701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:40.206735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:09:40.206768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:40.206798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:09:40.206866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:09:40.206937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:40.206970Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:09:40.207000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:09:40.207025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:09:40.207622Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.207718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.207753Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:40.207808Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:09:40.207865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:40.208374Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.208438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:40.208487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:40.208512Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:09:40.208535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:40.208606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:09:40.211267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:40.212300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:40.212532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:40.212581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:40.212959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:40.213061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.213101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:710:2699] TestWaitNotification: OK eventTxId 101 2026-01-10T12:09:40.213529Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:40.213732Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/UniqueName" took 242us result status StatusSuccess 2026-01-10T12:09:40.214112Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:09:40.217743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "UniqueName" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:40.217955Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-10T12:09:40.218124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, at schemeshard: 72057594046678944 2026-01-10T12:09:40.220455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2026-01-10T12:09:40.220749Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, operation: ALTER STREAMING QUERY, path: UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:40.221069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:40.221124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:40.221547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:40.221659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.221703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:718:2707] TestWaitNotification: OK eventTxId 102 |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:38.101321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:38.101402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.101437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:38.101466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:38.101494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:38.101518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:38.101582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:38.101656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:38.102422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:38.102690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:38.182434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:38.182502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:38.198525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:38.198655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:38.198870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:38.208269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:38.208545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:38.209346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:38.209634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:38.212196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.212392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:38.213740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:38.213805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:38.213880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:38.213926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:38.214058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:38.214349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:38.370927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.371994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.372993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.373080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.373181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:38.373270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:09:40.207085Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:09:40.209142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.209216Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:40.209264Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:09:40.211030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.211083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.211136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:40.211187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.211345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:40.213437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:09:40.213581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:09:40.213918Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:40.214047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:40.214096Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:40.214380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:09:40.214442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-10T12:09:40.214628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.214705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:09:40.220668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:40.220733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:40.221003Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:40.221059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:09:40.221390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:40.221448Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:09:40.221572Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:40.221615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.221662Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:40.221707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.221757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:09:40.221807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:40.221850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:09:40.221886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:09:40.221983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:40.222036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:09:40.222078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:09:40.222835Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:40.222972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:40.223028Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:09:40.223075Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:09:40.223124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:40.223228Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:09:40.226185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:09:40.226591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:09:40.226951Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2670] Bootstrap 2026-01-10T12:09:40.227935Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2670] Become StateWork (SchemeCache [2:685:2675]) 2026-01-10T12:09:40.230772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:40.230923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-10T12:09:40.231027Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-10T12:09:40.232071Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:09:40.234468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:40.234714Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check 
failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP STREAMING QUERY, path: MyStreamingQuery 2026-01-10T12:09:40.235445Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:40.235716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:40.235767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:40.236149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:40.236249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:40.236289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2685] TestWaitNotification: OK eventTxId 101 |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TResourceBroker::TestRealUsage |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig [GOOD] >> BootstrapperTest::LoneBootstrapper >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2026-01-10T12:09:10.900567Z :BasicWriteSession INFO: Random seed for debugging is 1768046950900534 2026-01-10T12:09:11.245341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703833096435266:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:11.245830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:11.396363Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:11.411333Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703834585897922:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:11.414961Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:11.564911Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:12.117076Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:12.144677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:12.211601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:12.211741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:12.213127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:12.213228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:12.274858Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:12.277756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:12.278965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:12.307066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:12.322654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:12.336196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:12.375792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:12.534779Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:12.632960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0022ab/r3tmp/yandexWsKlw2.tmp 2026-01-10T12:09:12.632979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0022ab/r3tmp/yandexWsKlw2.tmp 2026-01-10T12:09:12.633156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/d9xi/0022ab/r3tmp/yandexWsKlw2.tmp 2026-01-10T12:09:12.633251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:12.681501Z INFO: TTestServer started on Port 1238 GrpcPort 28891 PQClient connected to localhost:28891 2026-01-10T12:09:12.923760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:16.250397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703833096435266:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.250474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:16.416575Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703834585897922:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.416666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:17.106599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703858866240345:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.116266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.118794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703858866240383:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.118855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.118906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703858866240382:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.121168Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703860355702033:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.121236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703860355702022:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.121320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.121628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703860355702037:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.121749Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.123723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:17.180243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703858866240419:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.263315Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703860355702039:2134] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:09:17.309897Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703860355702036:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:09:17.332880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.341172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:09:17.341463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703858866240386:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:09:17.420111Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703860355702066:2140] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:17.437253Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703858866240483:2971] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:17.816465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsa ... 09:38.972631Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:39.034453Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:67: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2026-01-10T12:09:39.034811Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593703952944946057:2457] connected; active server actors: 1 2026-01-10T12:09:39.034868Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:80: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2026-01-10T12:09:39.034880Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) Update the table 2026-01-10T12:09:39.035097Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593703952944946057:2457] disconnected. 
2026-01-10T12:09:39.035126Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593703952944946057:2457] disconnected; active server actors: 1 2026-01-10T12:09:39.035143Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593703952944946057:2457] disconnected no session 2026-01-10T12:09:39.072895Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.072927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.072937Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.072955Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.072970Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:39.167137Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-10T12:09:39.167194Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2026-01-10T12:09:39.167218Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7593703948649978720:2457] (SourceId=src, PreferedPartition=(NULL)) Start idle 2026-01-10T12:09:39.167261Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:09:39.168105Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [3:7593703952944946085:2457], now have 1 active actors on pipe 2026-01-10T12:09:39.168242Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2026-01-10T12:09:39.168437Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:09:39.168472Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:09:39.168548Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-01-10T12:09:39.168589Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.168607Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:09:39.168627Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:09:39.168641Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.168662Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:09:39.168693Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:09:39.168705Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:09:39.168719Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:39.168758Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-10T12:09:39.168801Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:09:39.169232Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:09:39.169271Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:09:39.169328Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:09:39.169572Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 2026-01-10T12:09:39.170367Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1768046979170 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:39.170502Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-10T12:09:39.170739Z :INFO: [] MessageGroupId [src] SessionId [src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0] Write session: close. Timeout = 0 ms 2026-01-10T12:09:39.170786Z :INFO: [] MessageGroupId [src] SessionId [src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0] Write session will now close 2026-01-10T12:09:39.170848Z :DEBUG: [] MessageGroupId [src] SessionId [src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0] Write session: aborting 2026-01-10T12:09:39.171336Z :INFO: [] MessageGroupId [src] SessionId [src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:09:39.171373Z :DEBUG: [] MessageGroupId [src] SessionId [src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0] Write session: destroy 2026-01-10T12:09:39.173216Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.173247Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.173262Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.173281Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.173295Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:39.175168Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 grpc read done: success: 0 data: 2026-01-10T12:09:39.175196Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 grpc read failed 2026-01-10T12:09:39.175421Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 1 sessionId: src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 2026-01-10T12:09:39.175461Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|4c0a9281-cb36e9fe-8e0fddda-3c04b35d_0 is DEAD 2026-01-10T12:09:39.175839Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:09:39.176245Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7593703952944946085:2457] destroyed 2026-01-10T12:09:39.176298Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:09:39.176323Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.176337Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.176350Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.176373Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.176388Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2026-01-10T12:09:39.273621Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.273663Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.273679Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.273696Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.273710Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:39.373999Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:39.374042Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.374061Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:39.374080Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:39.374095Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> BootstrapperTest::DuplicateNodes [GOOD] >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TResourceBroker::TestRandomQueue [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: 
"queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 
5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2026-01-10T12:09:41.120629Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:41.120743Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:41.121567Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:41.121629Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-10T12:09:41.121771Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:41.121801Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2026-01-10T12:09:41.125463Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-10T12:09:41.125564Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2026-01-10T12:09:41.125738Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-10T12:09:41.125775Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-10T12:09:41.344452Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:41.345165Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:222:2097] 2026-01-10T12:09:41.345698Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:41.345747Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TSchemeShardSecretTest::CreateNotInDatabase |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:123:2150] sender: [1:125:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] 
sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:165:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:123:2150] sender: [1:166:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:113:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:132:2057] recipient: [2:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:173:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:208:2057] recipient: [2:14:2061] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2026-01-10T12:09:42.605689Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-1 (1 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.605765Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-1 (1 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.605968Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-6 (6 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606183Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-14 (14 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606274Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-17 (17 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606317Z node 2 
:RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-18 (18 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606354Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-19 (19 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606394Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-20 (20 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606468Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-23 (23 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606650Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-31 (31 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606776Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-36 (36 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.606972Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-46 (46 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607027Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-48 (48 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607111Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-52 (52 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607431Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-66 (66 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607477Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-67 (67 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607557Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-70 (70 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607774Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-79 (79 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607884Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-83 (83 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.607930Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-84 (84 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.608004Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-87 (87 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.608058Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-89 (89 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612287Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-91 (91 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612359Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-92 (92 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612401Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-93 (93 by [2:103:2137])' of unknown type 'wrong' to default queue 
2026-01-10T12:09:42.612443Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-94 (94 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612494Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-95 (95 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612566Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-97 (97 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612618Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-98 (98 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612656Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-99 (99 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612734Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-102 (102 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612800Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-104 (104 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612845Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-105 (105 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.612904Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-107 (107 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613050Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-111 (111 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613239Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-120 (120 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613405Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-126 (126 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613492Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-129 (129 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613900Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-150 (150 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.613996Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-153 (153 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614094Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-157 (157 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614190Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-161 (161 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614282Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-163 (163 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614393Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-167 (167 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614469Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-170 (170 by 
[2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614515Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-171 (171 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614802Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-181 (181 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614884Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-183 (183 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614924Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-184 (184 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.614978Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-186 (186 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615048Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-187 (187 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615089Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-188 (188 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615130Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-189 (189 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615324Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-197 (197 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615418Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-201 (201 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615492Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-203 (203 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615620Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-208 (208 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615660Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-209 (209 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615732Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-212 (212 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.615930Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-221 (221 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616127Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-229 (229 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616167Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-230 (230 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616247Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-233 (233 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616286Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-234 (234 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616359Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning waiting task 'task-237 (237 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616432Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-240 (240 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.616705Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-251 (251 ... CE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-778 (778 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.667998Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-792 (792 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668102Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-817 (817 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668144Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-819 (819 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668297Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-844 (844 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668372Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-882 (882 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668411Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-886 (886 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668484Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-912 (912 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668535Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-932 (932 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668570Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-937 (937 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668606Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-959 (959 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668651Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-999 (999 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668690Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-18 (18 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668756Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-20 (20 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668814Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-46 (46 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.668855Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-48 (48 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669010Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-83 (83 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669053Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-89 
(89 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669094Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-92 (92 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669121Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-95 (95 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669199Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-97 (97 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669268Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-102 (102 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669294Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-104 (104 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669361Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-129 (129 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669472Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-150 (150 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669520Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-157 (157 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669601Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-171 (171 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669642Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-188 (188 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669740Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-221 (221 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669824Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-230 (230 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669908Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-267 (267 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.669948Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-277 (277 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670009Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-285 (285 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670053Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-303 (303 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670137Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-321 (321 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670183Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-346 (346 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670267Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-375 (375 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670294Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning in-fly task 'task-381 (381 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670383Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-432 (432 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670410Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-448 (448 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670568Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-480 (480 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670614Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-493 (493 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670677Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-503 (503 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670736Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-526 (526 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.670924Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-595 (595 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671005Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-625 (625 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671075Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-635 (635 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671114Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-643 (643 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671142Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-661 (661 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671200Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-665 (665 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671297Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-701 (701 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671344Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-720 (720 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671396Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-736 (736 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671522Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-772 (772 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671568Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-776 (776 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671605Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-781 (781 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671689Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-816 (816 by [2:103:2137])' of unknown type 'wrong' to default queue 
2026-01-10T12:09:42.671767Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-856 (856 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671830Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-870 (870 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671892Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-877 (877 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.671946Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-919 (919 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672048Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-954 (954 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672144Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-968 (968 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672205Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-972 (972 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672267Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-978 (978 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672312Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-979 (979 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-10T12:09:42.672419Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-996 (996 by [2:103:2137])' of unknown type 'wrong' to default queue |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> TSchemeShardSecretTest::DefaultDescribeSecret >> TSchemeShardSecretTest::CreateSecret >> 
TSchemeShardSecretTest::DropSecret >> TSchemeShardSecretTest::EmptySecretName >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret >> DataShardVolatile::DistributedWriteThenDropTable >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:43.396826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:43.396984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:43.397040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:43.397086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:43.397146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:43.397196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:43.397291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:43.397379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:43.398485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:43.398857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:43.508888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:43.508949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:43.528933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:43.529105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:43.529330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:43.545885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:43.546167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:43.547164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:43.547451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:43.552881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:43.553117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:43.554749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:43.554820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:43.554933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:43.554984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:43.555033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:43.555309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:43.706667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.707895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:09:43.708185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.708945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.709058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.709142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
4: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:46.554229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-10T12:09:46.554274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:09:46.554317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:09:46.554596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:46.554642Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:09:46.554739Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:46.554777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:46.554815Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:46.554846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:46.554889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:09:46.554956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:46.555011Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:09:46.555060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:09:46.555133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:46.555176Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-10T12:09:46.555212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:09:46.555246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:09:46.555270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-10T12:09:46.556291Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.556399Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.556440Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:46.556478Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:09:46.556522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:46.557376Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.557462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.557495Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:46.557524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:09:46.557558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:46.558742Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.558833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:46.558859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:46.558888Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:09:46.558918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:09:46.559008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:09:46.559349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:46.559396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:09:46.559515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:46.561995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:46.563771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:46.563883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:46.565392Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2026-01-10T12:09:46.565759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:09:46.565805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2026-01-10T12:09:46.565927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:09:46.565951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:09:46.566422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:09:46.566546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:09:46.566586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:773:2763] 2026-01-10T12:09:46.566737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:09:46.566807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:09:46.566830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:773:2763] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2026-01-10T12:09:46.567375Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:46.567573Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 269us result status StatusPathDoesNotExist 2026-01-10T12:09:46.567739Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2026-01-10T12:09:43.263215Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:43.263294Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:43.263320Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:43.265239Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:43.265286Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-10T12:09:43.265424Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:43.265442Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2026-01-10T12:09:43.265603Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-10T12:09:43.265622Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 13151740404452589043 2026-01-10T12:09:43.266472Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-10T12:09:43.266668Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-10T12:09:43.266889Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-10T12:09:43.266921Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-10T12:09:43.267157Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-10T12:09:43.267198Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2026-01-10T12:09:43.267403Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-10T12:09:43.267599Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-10T12:09:43.267629Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.153108s 2026-01-10T12:09:43.514323Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:43.516046Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:43.516807Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:43.516864Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-10T12:09:43.573234Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:43.573986Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:43.574483Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:43.574529Z node 3 :BOOTSTRAPPER INFO: 
bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2026-01-10T12:09:44.613723Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-10T12:09:44.613803Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2026-01-10T12:09:44.614283Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-10T12:09:44.614342Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:44.614458Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-10T12:09:44.614485Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:44.656147Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:44.657254Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:44.658907Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:44.658967Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-10T12:09:44.659120Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:44.659151Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2026-01-10T12:09:45.599150Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-10T12:09:45.599256Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2026-01-10T12:09:45.599883Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-10T12:09:45.599961Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:45.600063Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-10T12:09:45.600113Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:45.602765Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:45.602957Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-10T12:09:45.604751Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-10T12:09:45.604824Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2026-01-10T12:09:45.604909Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-10T12:09:45.604939Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2026-01-10T12:09:45.606201Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-10T12:09:45.606297Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-10T12:09:45.606521Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2026-01-10T12:09:45.606563Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2026-01-10T12:09:45.608059Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2026-01-10T12:09:45.608124Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2026-01-10T12:09:46.469194Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2026-01-10T12:09:46.469305Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:46.469395Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2026-01-10T12:09:46.469433Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:46.470404Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-10T12:09:46.470624Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-10T12:09:46.471254Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-10T12:09:46.471300Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2026-01-10T12:09:46.471468Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-10T12:09:46.471497Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2026-01-10T12:09:46.472328Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2026-01-10T12:09:46.472412Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2026-01-10T12:09:46.472534Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-10T12:09:46.472581Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-10T12:09:46.472612Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.127990s 2026-01-10T12:09:46.472665Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2026-01-10T12:09:46.472690Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2026-01-10T12:09:46.472735Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-10T12:09:46.474925Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2026-01-10T12:09:46.474999Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:46.486270Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:439:2098] 2026-01-10T12:09:46.521376Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:46.521443Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-10T12:09:46.605363Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-10T12:09:46.606224Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:439:2098] 2026-01-10T12:09:46.606771Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-10T12:09:46.606812Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... 
waiting for pipe to connect |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2026-01-10T12:09:15.628278Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1768046955628232 2026-01-10T12:09:16.227303Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703857130247757:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.227370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:16.384504Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:16.418827Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703856838054247:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.418892Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:16.514227Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:17.161063Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.200688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.327147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.327257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.327466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.327523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.475906Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.517326Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:17.517614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.520198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.689147Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.690083Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.690243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.713550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:17.753482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.806413Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.023275s 2026-01-10T12:09:18.093317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00227f/r3tmp/yandexPq1C1R.tmp 2026-01-10T12:09:18.093344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00227f/r3tmp/yandexPq1C1R.tmp 2026-01-10T12:09:18.093501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00227f/r3tmp/yandexPq1C1R.tmp 2026-01-10T12:09:18.093617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:18.196407Z INFO: TTestServer started on Port 18427 GrpcPort 2514 PQClient connected to localhost:2514 2026-01-10T12:09:18.724304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:21.230536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703857130247757:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:21.230613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:21.419428Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703856838054247:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:21.419517Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:22.434604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882607858171:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.434713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.435224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882607858187:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.435293Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.435824Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882607858184:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.444065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:22.477583Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703882607858189:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:22.872258Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703882607858217:2141] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:22.872710Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.190277s 2026-01-10T12:09:22.872754Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.190330s 2026-01-10T12:09:22.914022Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703882900052843:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:22.915941Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2MwYzM3ZDEtNDYxNGI5NzUtZGVmMWY5MzktMzY3YjJiNGM=, ActorId: [1:7593703882900052796:2332], ActorState: ExecuteState, LegacyTraceId: 01kekwz9st7c60bvard7f2ykd3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:22.918190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:22.918332Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:09:22.916741Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703882607858226:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 20 ... dingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.436121Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:44.535708Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:44.535749Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.535763Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:44.535781Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.535795Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:44.636372Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:44.636407Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.636420Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:44.636440Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.636452Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:44.740263Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:44.740304Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.740319Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:44.740338Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.740351Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:44.844217Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:44.844254Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.844269Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:44.844287Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.844299Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:44.945624Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:44.945659Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.945675Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:44.945694Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.945710Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.052321Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.052359Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.052375Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.052395Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.052408Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.153554Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.153591Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.153605Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.153624Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.153638Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.256197Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.256234Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.256258Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.256276Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.256289Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.356365Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.356407Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.356422Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.356440Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.356455Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.460446Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.460481Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.460494Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.460511Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.460524Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.564959Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.564995Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.565008Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.565024Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.565037Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.671213Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.671251Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.671265Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.671284Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.671308Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.771821Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.771857Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.771872Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.771891Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.771904Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.878863Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.878890Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.878899Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.878914Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.878922Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:46.102496Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [3:7593703983784551672:2490] TxId: 281474976710678. Ctx: { TraceId: 01kekx00ew4yqk2w1hmww9vm18, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MTNmMGQwMGQtNTVlYWZjZTEtYWMwODgwOTctNjQ2Njk3ZmE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-10T12:09:46.103214Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593703983784551676:2490], TxId: 281474976710678, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx00ew4yqk2w1hmww9vm18. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=MTNmMGQwMGQtNTVlYWZjZTEtYWMwODgwOTctNjQ2Njk3ZmE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593703983784551672:2490], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] |86.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2026-01-10T12:09:15.671801Z :FallbackToSingleDb INFO: Random seed for debugging is 1768046955671762 2026-01-10T12:09:16.243281Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703856557903311:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.243358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:16.465476Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703855324401287:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:16.468327Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:16.465967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:16.569495Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:17.144288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.172187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:17.272454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:17.352003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.356357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.362712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:17.362807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:17.370583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.387407Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:17.400736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:17.537382Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.548668Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.589431Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:17.622136Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.631385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.766707Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.021208s 2026-01-10T12:09:18.006646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002275/r3tmp/yandexhqHr7w.tmp 2026-01-10T12:09:18.006671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002275/r3tmp/yandexhqHr7w.tmp 2026-01-10T12:09:18.006842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002275/r3tmp/yandexhqHr7w.tmp 2026-01-10T12:09:18.006927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:18.092204Z INFO: TTestServer started on Port 9569 GrpcPort 19897 PQClient connected to localhost:19897 2026-01-10T12:09:19.020933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:21.245354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703856557903311:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:21.245460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:21.420588Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703855324401287:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:21.420675Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:23.168866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703885389172676:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.168866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703886622675648:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169099Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703885389172689:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703886622675676:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703886622675678:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169519Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703885389172692:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.169593Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.173251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:23.197827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703886622675713:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.197934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.198207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703886622675715:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.198258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:23.243768Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703885389172695:2136] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:09:23.252885Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703886622675680:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:09:23.256205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:09:23.256528Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703885389172693:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
... action and tx pending commits 2026-01-10T12:09:44.904253Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:44.904266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.004651Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.004682Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.004696Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.004726Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.004737Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.105131Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.105184Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.105197Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.105216Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.105227Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.208240Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.208273Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.208287Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.208308Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.208319Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.309745Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.309805Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.309839Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.309876Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.309891Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.409903Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user 
action and tx events 2026-01-10T12:09:45.409935Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.409955Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.409985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.409997Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.511157Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.511191Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.511206Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.511230Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.511242Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2026-01-10T12:09:45.563362Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2026-01-10T12:09:45.563469Z :INFO: [/Root] [] [17fb192a-2ba435aa-59a15a4c-b093cdb5] Open read subsessions to databases: { name: , endpoint: localhost:7981, path: /Root } 2026-01-10T12:09:45.563721Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Starting read session 2026-01-10T12:09:45.563767Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Starting single session 2026-01-10T12:09:45.564286Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-10T12:09:45.564330Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-10T12:09:45.564379Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] Reconnecting session to cluster in 0.000000s 2026-01-10T12:09:45.564589Z :ERROR: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:7981
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:7981. 2026-01-10T12:09:45.564657Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-10T12:09:45.564699Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-10T12:09:45.564861Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:7981" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:7981
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:7981. " } 2026-01-10T12:09:45.566957Z :NOTICE: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:09:45.567013Z :DEBUG: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:7981" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:7981
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:7981. " } 2026-01-10T12:09:45.567134Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Closing read session. Close timeout: 0.010000s 2026-01-10T12:09:45.567185Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:09:45.567225Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:45.567267Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Closing read session. Close timeout: 0.000000s 2026-01-10T12:09:45.567314Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:09:45.567357Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:45.567394Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Closing read session. Close timeout: 0.000000s 2026-01-10T12:09:45.567432Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:09:45.567481Z :INFO: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:45.567557Z :NOTICE: [/Root] [/Root] [5812ae67-1db261ab-15dadbd4-a15e80c8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:09:45.616745Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.616784Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.616803Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.616832Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.616847Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.720204Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.720242Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.720260Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.720284Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.720310Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:45.821426Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:45.821465Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.821483Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:45.821512Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:45.821525Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |86.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:43.506297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:43.506428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:43.506488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:43.506534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:43.506591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:43.506626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:43.506716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:43.506802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:43.507693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:43.507942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:43.611292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:43.611372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:43.627589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:43.627731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:43.627945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:43.636541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:43.636791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:43.637650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-10T12:09:43.637923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:43.640546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:43.640767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:43.642102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:43.642171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:43.642279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:43.642337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:43.642388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:43.642638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:43.813292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.814535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.814717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.814810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.814896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.814997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:43.815935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:48.151160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:09:48.151247Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:48.151280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:09:48.151322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:09:48.151426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:09:48.151475Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:09:48.151607Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:48.151648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:48.151694Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:48.151730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:48.151771Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:09:48.151814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:48.151856Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:09:48.151889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:09:48.151966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:48.152011Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:09:48.152055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:09:48.152121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:09:48.152983Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:48.153073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:48.153111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:48.153159Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:09:48.153212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:48.153881Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:48.153961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:48.153992Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:48.154022Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:09:48.154181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:48.154258Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:09:48.156951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:48.157963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:48.158199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:48.158256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:48.158684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:48.158789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:48.158832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:708:2698] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:09:48.162190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:48.162390Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2026-01-10T12:09:48.162532Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:09:48.164649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:48.164904Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:48.165239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:48.165285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:48.165687Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:48.165791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:48.165833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:715:2705] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:48.166299Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:48.166465Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 192us result status StatusSuccess 2026-01-10T12:09:48.166880Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |86.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> TSchemeShardSecretTest::DropNotASecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:46.690133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:46.690229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:46.690311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:46.690338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:46.690369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:46.690388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:46.690462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:46.690528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:46.691207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:46.691434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:46.795008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:46.795069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:46.810114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:46.810250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:46.810443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2026-01-10T12:09:46.818041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:46.818277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:46.819037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:46.819284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:46.821435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:46.821628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:46.822819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:46.822878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:46.822948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:46.823008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:46.823060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:46.823300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:46.971918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.972917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.973931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:46.974025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
01-10T12:09:49.144385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-10T12:09:49.144416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:09:49.145324Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:49.145443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:49.145491Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:09:49.145534Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:09:49.145577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:49.146038Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:49.146091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:09:49.146112Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:09:49.146136Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:09:49.146164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:49.146216Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:09:49.151001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:09:49.151390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:49.151566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send 
EvNotifyTxCompletion 2026-01-10T12:09:49.151600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:49.151866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:49.151939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.151985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:739:2728] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:49.152431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:49.152662Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 257us result status StatusSuccess 2026-01-10T12:09:49.153008Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-10T12:09:49.155966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:49.156196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2026-01-10T12:09:49.156250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2026-01-10T12:09:49.156393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:09:49.158485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:49.158737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:09:49.159018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:09:49.159066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:09:49.159439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:09:49.159523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.159564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:747:2736] TestWaitNotification: OK eventTxId 103 2026-01-10T12:09:49.159968Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:49.160172Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 222us result status StatusSuccess 2026-01-10T12:09:49.160487Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" 
PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> DataShardVolatile::DistributedWrite >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:47.132649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:47.132732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.132784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:47.132822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:47.132871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:47.132916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:47.133013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.133078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:47.133969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:47.134270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:47.235646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:47.235706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:47.254668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:47.254795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:47.255005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:47.263367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:47.263614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:47.264443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:47.264682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:47.267334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.267530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:47.268747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:47.268810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.268912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:47.268962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-10T12:09:47.269003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:47.269259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:47.532594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.533605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.533753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.533844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.533929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:09:47.534162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.534736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 9:49.587086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:09:49.587210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:49.588736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:09:49.588857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:09:49.589160Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:49.589268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:49.589339Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:49.589576Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:09:49.589630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:09:49.589784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:49.589865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:09:49.591679Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:49.591726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:49.591942Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:49.591991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:09:49.592314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:09:49.592367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:09:49.592469Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:49.592527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:49.592569Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:09:49.592604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:49.592666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:09:49.592708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:09:49.592747Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:09:49.592783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:09:49.592853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:49.592894Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:09:49.592935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:09:49.593434Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:49.593543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:09:49.593591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:09:49.593641Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:09:49.593686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:09:49.593782Z node 
2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:09:49.603129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:09:49.603629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:49.604178Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-01-10T12:09:49.605315Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-01-10T12:09:49.605555Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:49.605946Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 402us result status StatusPathDoesNotExist 2026-01-10T12:09:49.606161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:09:49.606471Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:09:49.608429Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2026-01-10T12:09:49.611403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:49.611563Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2026-01-10T12:09:49.611667Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-10T12:09:49.618938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose 
Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:49.619208Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:49.619530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:49.619575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:49.620156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:49.620248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.620290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:696:2685] TestWaitNotification: OK eventTxId 101 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:46.882918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:46.883010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:46.883062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:46.883104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:46.883160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:46.883190Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:46.883259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:46.883371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:46.888539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:46.888976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:47.092393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:47.092453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:47.139984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:47.165855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:47.166050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:47.178551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:47.178734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:47.179517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:47.179896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:47.190237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.190448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:47.191744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:47.191824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.191928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:47.191973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:47.192017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:47.192281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:47.440796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.442200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.442351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.442420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.442491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.443807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.443928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.444675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR 
... eration IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:09:49.662933Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:09:49.662956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:09:49.662982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-10T12:09:49.663028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:09:49.663071Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:09:49.663108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:09:49.663178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:09:49.663216Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-10T12:09:49.663240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-10T12:09:49.663278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:09:49.663302Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-10T12:09:49.663326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-10T12:09:49.663374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:09:49.663402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-10T12:09:49.663440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:09:49.663478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-10T12:09:49.663502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-10T12:09:49.663527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-10T12:09:49.665427Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.665526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.665563Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.665619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:09:49.665669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:49.666303Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.666421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.666462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.666497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-10T12:09:49.666533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:49.667463Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.667547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.667579Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.667631Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:09:49.667672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:49.669131Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.669240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 
3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.669293Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.669328Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:09:49.669362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:09:49.669439Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:09:49.672482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:49.672638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:49.674333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:49.674419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:49.674631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:49.674681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:49.675094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:49.675205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.675245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:725:2714] TestWaitNotification: OK eventTxId 101 2026-01-10T12:09:49.675703Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:49.675915Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 231us result status StatusSuccess 2026-01-10T12:09:49.676309Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:47.185034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:47.185124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.185164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:47.185198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:47.185235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:47.185265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:47.185367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.185448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:09:47.186291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:47.186592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:47.276129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:47.276188Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:47.300472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:47.300597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:47.300828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:47.321114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:47.321370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:47.322230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:47.322510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:47.327121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.327394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:47.328804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:47.328879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.328958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:47.329025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:47.329093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:47.329401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:47.487729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.488832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.488984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.489938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.490023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:49.921862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:09:49.921954Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:49.921992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:09:49.922053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:09:49.922117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:09:49.922165Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:09:49.922287Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:49.922329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:49.922377Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:09:49.922412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:49.922458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:09:49.922502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:09:49.922545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:09:49.922583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:09:49.922664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:49.922707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:09:49.922746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:09:49.922781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:09:49.923971Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.924069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.924155Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.924204Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:09:49.924255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:09:49.925238Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.925319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:09:49.925352Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:09:49.925385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:09:49.925418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:49.925491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:09:49.927538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:09:49.928690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:09:49.928920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:09:49.928970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:09:49.929350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:09:49.929449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.929491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: 
tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:714:2703] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:09:49.932542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:09:49.932704Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2026-01-10T12:09:49.932833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:09:49.934972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:49.935202Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:09:49.935514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:09:49.935558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:09:49.935928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:49.936014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:09:49.936051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:721:2710] TestWaitNotification: OK eventTxId 102 2026-01-10T12:09:49.936490Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:49.936656Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 189us result status StatusSuccess 2026-01-10T12:09:49.937024Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28969, MsgBus: 21775 2026-01-10T12:07:01.091146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703277582777369:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:01.091204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:01.377073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:01.387351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:01.387466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:01.494910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:01.602656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.673910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch 
for subscription [1:7593703277582777343:2081] 1768046821089864 != 1768046821089867 2026-01-10T12:07:01.720232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:01.811662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:01.811690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:01.811697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:01.811780Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:02.102710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:02.741180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:02.753742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:05.693468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:06.044201Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2026-01-10T12:07:06.092501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703277582777369:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:06.092773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:06.162211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 
\x62797465612039 \x62797465612039 2026-01-10T12:07:06.365129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:06.473950Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-10T12:07:06.723545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:06.820087Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:07:06.821236Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976715671 at tablet 72075186224037891 errors: WRONG_SHARD_STATE (Couldn't deliver stream clearance request for [0:281474976715671] at 72075186224037891) | 2026-01-10T12:07:06.821317Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037891 status: ERROR errors: WRONG_SHARD_STATE (Couldn't deliver stream clearance request for [0:281474976715671] at 72075186224037891) | {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-10T12:07:06.945323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:07.060548Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2026-01-10T12:07:07.222101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:07.315077Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send 
TEvPoisonPill f f t t 2026-01-10T12:07:07.421952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:07.505654Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2026-01-10T12:07:07.597661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:07.731164Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2026-01-10T12:07:07.811766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:07.900271Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-10T12:07:07.969259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-10T12:07:08.158796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:07:08.245421Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-10T12:07:08.304449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-10T12:07:08.457382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesha ... 
RROR: schemereq.cpp:624: Actor# [14:7593703925009119735:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:32.510101Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:32.956913Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:33.630616Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:504: Get parsing result with error, self: [14:7593703929304087360:2392], owner: [14:7593703925009119629:2311], statement id: 0 2026-01-10T12:09:33.631127Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=YzcwM2Y5YzAtNWI5MzU2MS1lM2JjMGM2YS03ZWY2NWUyMg==, ActorId: [14:7593703929304087358:2391], ActorState: ExecuteState, LegacyTraceId: 01kekwzmnrfd15j80g6h1gcs0c, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Error while parsing query." severity: 1 issues { position { row: 1 column: 1 } message: "alternative is not implemented yet : 34" end_position { row: 1 column: 1 } severity: 1 } } tx_id# trace_id# 2026-01-10T12:09:33.910097Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7593703929304087389:2404], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:56: Error: At function: PgOp
:2:56: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-10T12:09:33.910718Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=MmI3ZWZlY2ItZDcwNzQ5ZTctYTE2Y2U0ODctMWRlOWM4Nzk=, ActorId: [14:7593703929304087386:2402], ActorState: ExecuteState, LegacyTraceId: 01kekwzmxp8f9mv1zz95pjgr2p, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 56 } message: "At function: PgOp" end_position { row: 2 column: 56 } severity: 1 issues { position { row: 2 column: 56 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 56 } severity: 1 } } } } } } tx_id# trace_id# 2026-01-10T12:09:33.956024Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7593703929304087401:2410], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:58: Error: At function: PgAnd
:2:68: Error: At function: PgOp
:2:68: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-10T12:09:33.956735Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=NTViMWYwZjYtODJjZDM3NWQtNWNmNWU5NjYtNWEzM2FhMmE=, ActorId: [14:7593703929304087398:2408], ActorState: ExecuteState, LegacyTraceId: 01kekwzmz004p5g9acv9s5p5t9, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 58 } message: "At function: PgAnd" end_position { row: 2 column: 58 } severity: 1 issues { position { row: 2 column: 68 } message: "At function: PgOp" end_position { row: 2 column: 68 } severity: 1 issues { position { row: 2 column: 68 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 68 } severity: 1 } } } } } } } tx_id# trace_id# 2026-01-10T12:09:33.974494Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kekwzn0ebe78sr66d8hw6mdk, Database: /Root, SessionId: ydb://session/3?node_id=14&id=YzQ1NGRjMDgtOTk3MmVlMzMtNjNkN2FiNDItYmU4MjdmMWQ=, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-10T12:09:33.974910Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=YzQ1NGRjMDgtOTk3MmVlMzMtNjNkN2FiNDItYmU4MjdmMWQ=, ActorId: [14:7593703929304087411:2414], ActorState: ExecuteState, LegacyTraceId: 01kekwzn0ebe78sr66d8hw6mdk, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-10T12:09:34.025013Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:34.117035Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:34.207095Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7593703933599054877:2439], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2026-01-10T12:09:34.207623Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=Y2ExYTQ2YTMtZmZmN2QyZTktNGEwNWI0ZGUtN2I2MGNhMmY=, ActorId: [14:7593703933599054874:2437], ActorState: ExecuteState, LegacyTraceId: 01kekwzn70f9801p23jhmvjkrg, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "values have 3 columns, INSERT INTO expects: 2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:34.266722Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7593703933599054889:2445], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2026-01-10T12:09:34.267228Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=NjU2MzQyNzktNjM5ZDQyOGEtODQ2YTg3MGItMTgxOGNlY2E=, ActorId: [14:7593703933599054886:2443], ActorState: ExecuteState, LegacyTraceId: 01kekwzn8y2r8p6t767yv36axp, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert type: List> to List>" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert \'id\': pgunknown to Optional" end_position { row: 1 column: 1 } severity: 1 } } issues { position { row: 1 column: 1 } message: "Row type mismatch for table: db.[/Root/PgTable2]" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:34.778015Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kekwzna476f8dvgb84h8w3fm, Database: /Root, SessionId: ydb://session/3?node_id=14&id=MWRmMTI5Y2UtM2MzZDJiOTMtYTI0NmJkMy0yYzIzMjRkMQ==, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-10T12:09:34.778805Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=MWRmMTI5Y2UtM2MzZDJiOTMtYTI0NmJkMy0yYzIzMjRkMQ==, ActorId: [14:7593703933599054898:2449], ActorState: ExecuteState, LegacyTraceId: 01kekwzna476f8dvgb84h8w3fm, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-10T12:09:34.918157Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:35.633376Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2026-01-10T12:09:35.691688Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:47.222804Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:47.222930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.223048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:47.223096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:47.223140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:47.223180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:47.223271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.223385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:47.224351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:47.224689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:47.319925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:47.320009Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:47.338735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:47.338885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:47.339119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:47.356423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:47.356759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:47.357611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:47.357905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:47.361397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.361653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:47.363010Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:47.363114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.363194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:47.363258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:47.363314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:47.363606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:47.572708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.573826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.573999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.574928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.575040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.575121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:50.561535Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 172us result status StatusSuccess 2026-01-10T12:09:50.561796Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:50.562230Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:50.562338Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 120us result status StatusSuccess 2026-01-10T12:09:50.562706Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:50.563096Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2026-01-10T12:09:50.563203Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 115us result status StatusSuccess 2026-01-10T12:09:50.563512Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:50.563939Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:50.564034Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 106us result status StatusSuccess 2026-01-10T12:09:50.572827Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: 
"\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TabletState::ExplicitUnsubscribe >> TSchemeShardSecretTest::CreateSecretOverExistingObject >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> BootstrapperTest::RestartUnavailableTablet >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TabletState::ExplicitUnsubscribe [GOOD] >> TabletState::ImplicitUnsubscribeOnDisconnect >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] 
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:47.428961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:47.429109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.429168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:47.429229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:47.429276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:47.429314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:47.429401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:47.429495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:47.430477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:47.430820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:47.536833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:47.536904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:47.557596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:47.557734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:47.557942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:47.566420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:47.566729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:47.567588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:47.568071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:47.571265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.571500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:47.572790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:47.572863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:47.573045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:47.573100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:47.573152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:47.573326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:47.743853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.745968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.746047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.746138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.746238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:47.746329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... n_alter_secret.cpp:31: [72057594046678944] TAlterSecret TPropose operationId# 103:0 ProgressState 2026-01-10T12:09:52.266330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-10T12:09:52.266461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:09:52.267736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-10T12:09:52.267919Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter), operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2026-01-10T12:09:52.269387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:09:52.269548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-10T12:09:52.269949Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, 
stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:52.270084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 12884904048 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:52.270143Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000040 2026-01-10T12:09:52.270281Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-10T12:09:52.270491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:09:52.273222Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:52.273280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:09:52.273495Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:52.273548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:09:52.273879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:52.273935Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:09:52.274049Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:52.274097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:52.274145Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:52.274200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:52.274256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:09:52.274305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:52.274348Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:09:52.274385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:09:52.274473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 
2026-01-10T12:09:52.274519Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-10T12:09:52.274555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-10T12:09:52.275557Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:52.275693Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:52.275778Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:52.275832Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:09:52.275879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:52.276001Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:09:52.279500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-10T12:09:52.279811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:09:52.279875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-10T12:09:52.279968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:09:52.279991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:09:52.280489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:09:52.280608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:52.280654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:756:2746] 2026-01-10T12:09:52.280818Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:09:52.280888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:09:52.280915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:756:2746] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2026-01-10T12:09:52.281371Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:52.281601Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 259us result status StatusSuccess 2026-01-10T12:09:52.284996Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TCmsTest::ManagePermissions >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions >> TS3WrapperTests::GetObject |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessage >> TS3WrapperTests::GetObject [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> 
IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> TCmsTest::RequestReplaceDevicePDisk >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> TClusterInfoTest::FillInfo [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental >> KqpWorkloadServiceTables::TestLeaseUpdates >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> TCmsTenatsTest::CollectInfo >> TCmsTest::TestOutdatedState >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] >> TCmsTest::RestartNodeInDownState >> KqpPg::ValuesInsert+useSink [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2026-01-10T12:09:53.749767Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... 
blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2026-01-10T12:09:53.750520Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2026-01-10T12:09:53.750569Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2026-01-10T12:09:53.907133Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... waiting for multiple state storage lookup attempts (done) >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> KqpWorkloadService::TestLessConcurrentQueryLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> KqpPg::ValuesInsert-useSink >> TCmsTest::ManagePermissions [GOOD] >> HttpRequest::ProbeBaseStatsServerless [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTenatsTest::CollectInfo [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> TCmsTest::TestOutdatedState [GOOD] >> BasicUsage::ReadMirrored [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TCmsTest::TestSetResetMarkers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> TCmsTenatsTest::RequestShutdownHost |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |86.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.2%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:06:32.285973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:06:32.286075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:32.286128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:06:32.286177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:06:32.286215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:06:32.286263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:06:32.286349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:06:32.286413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:06:32.287313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:06:32.287664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:06:32.417886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:06:32.417970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:32.445301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:06:32.445421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:06:32.445598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:06:32.492457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:06:32.492786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:06:32.493622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:06:32.495731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:06:32.503051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.503277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:06:32.504654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:06:32.504729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:06:32.504815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:06:32.504864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:06:32.504907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:06:32.505173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:06:32.719290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.720647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.720812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.720898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.720972Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:06:32.721793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 51615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:53.879092Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:53.879132Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:53.880354Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:53.880457Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:53.880497Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:53.880536Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-10T12:09:53.880587Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-10T12:09:53.880729Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 
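The drop-table trace here shows the other half of the bookkeeping: each completed part of operation 103 decrements the reference count of the paths it touched, and a later TTxCleanDroppedPaths pass (a few lines below) physically removes a dropped path only once nothing references it anymore. The standalone C++ sketch below — hypothetical names, not YDB code — illustrates that general pattern under the assumption that a path must be both dropped and unreferenced before it becomes a removal candidate:

// Illustrative sketch only (not YDB code): dropped paths are queued for physical removal
// once their last reference is released, similar to the DecrementPathDbRefCount /
// TTxCleanDroppedPaths pairing in the log above.
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <unordered_set>
#include <vector>

class PathCleaner {
public:
    void IncrementRef(uint64_t pathId) { ++refs_[pathId]; }
    void MarkDropped(uint64_t pathId)  { dropped_.insert(pathId); MaybeQueue(pathId); }

    void DecrementRef(uint64_t pathId) {
        auto it = refs_.find(pathId);
        if (it == refs_.end()) return;
        if (--it->second == 0) refs_.erase(it);
        MaybeQueue(pathId);
    }

    // Models a cleanup transaction: removes every queued candidate and reports how many.
    size_t CleanDroppedPaths() {
        size_t removed = queue_.size();
        for (uint64_t pathId : queue_) {
            dropped_.erase(pathId);
            std::cout << "remove path " << pathId << "\n";
        }
        queue_.clear();
        return removed;
    }

private:
    void MaybeQueue(uint64_t pathId) {
        // Only paths that are both dropped and unreferenced become removal candidates.
        if (dropped_.count(pathId) && !refs_.count(pathId)) queue_.push_back(pathId);
    }

    std::unordered_map<uint64_t, uint64_t> refs_;
    std::unordered_set<uint64_t> dropped_;
    std::vector<uint64_t> queue_;
};

int main() {
    PathCleaner cleaner;
    cleaner.IncrementRef(42);
    cleaner.MarkDropped(42);      // still referenced, not yet a candidate
    cleaner.DecrementRef(42);     // last reference gone -> queued
    cleaner.CleanDroppedPaths();  // physical removal happens here, in a separate pass
}

The "1 paths in candidate queue" and "PersistRemovePath" lines later in this test's output play the role of the queue and cleanup pass in the sketch; the deferral is what lets the schemeshard finish the user-visible operation before doing the physical cleanup.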
2026-01-10T12:09:53.882303Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:53.882549Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:09:53.882940Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:09:53.883124Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-10T12:09:53.883188Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-10T12:09:53.883267Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-10T12:09:53.883310Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-10T12:09:53.883443Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2026-01-10T12:09:53.883568Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [20:786:2753] message: TxId: 103 2026-01-10T12:09:53.883683Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-10T12:09:53.883787Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:09:53.883865Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:09:53.884054Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:09:53.884158Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-10T12:09:53.884192Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-10T12:09:53.884237Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:53.884267Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-10T12:09:53.884298Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-10T12:09:53.884353Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:09:53.884387Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:3 2026-01-10T12:09:53.884412Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:3 2026-01-10T12:09:53.884449Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:09:53.884494Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:4 2026-01-10T12:09:53.884526Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:4 2026-01-10T12:09:53.884635Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-10T12:09:53.886166Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:09:53.886296Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-10T12:09:53.886454Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:09:53.886561Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:09:53.886612Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:09:53.887071Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.887710Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.889342Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.889423Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.889590Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.891594Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:53.893960Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:53.894071Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [20:1140:3040] 2026-01-10T12:09:53.894304Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2026-01-10T12:09:53.895327Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:53.895818Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 571us result status StatusPathDoesNotExist 2026-01-10T12:09:53.896393Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:09:53.897228Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:09:53.897654Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 501us result status StatusPathDoesNotExist 2026-01-10T12:09:53.897894Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:52.047690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:52.047787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:52.047831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:52.047872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:52.047914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:52.047960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:52.048089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:52.048175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:52.049121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:52.049445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:52.161225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:52.161292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:52.181429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:52.181541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:52.181718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:52.195074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:52.195308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:52.196067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:52.196339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:52.200018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:52.200266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:52.201533Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:52.201598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:52.201681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:52.201742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:52.201788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:52.202084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:52.358554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.359694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.359866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.359947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.360889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... : true 2026-01-10T12:09:55.727481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:09:55.727698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:09:55.728390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-10T12:09:55.729029Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:55.729189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:55.729252Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:09:55.729442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-10T12:09:55.729647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:09:55.729725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:55.733037Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:55.733091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:09:55.733268Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:09:55.733406Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:55.733445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:847:2804], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:09:55.733489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:847:2804], at schemeshard: 72057594046678944, txId: 103, path id: 39 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:09:55.733909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:09:55.733963Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:09:55.734171Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:55.734216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:55.734266Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:09:55.734309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:55.734355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:09:55.734410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:09:55.734452Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:09:55.734490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:09:55.734586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:09:55.734635Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-10T12:09:55.734706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-10T12:09:55.734748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:09:55.735552Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:55.735682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:55.735722Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:55.735796Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:09:55.735852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:09:55.736622Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:55.736708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:09:55.736740Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:09:55.736786Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:09:55.736823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:09:55.737009Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:09:55.744140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:09:55.744280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:09:55.744589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:09:55.744650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:09:55.745161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:09:55.745270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:09:55.745316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:880:2835] TestWaitNotification: OK eventTxId 103 2026-01-10T12:09:55.755370Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:55.755694Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 390us result status StatusSuccess 2026-01-10T12:09:55.756236Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2026-01-10T12:08:37.089825Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703688530889453:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:37.089947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:37.199224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:37.622529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:37.622688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:37.729039Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:37.808916Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:37.812565Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703684235922036:2081] 1768046916989680 != 1768046916989683 2026-01-10T12:08:37.822879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:38.008207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:38.025699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:38.025720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:38.025730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:38.025810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:38.138505Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:38.415985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:38.423398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:08:42.092333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703688530889453:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:42.092398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:42.295732Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:42.300558Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:42.300602Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:42.300882Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703710005726685:2324], Start check tables existence, number paths: 2 2026-01-10T12:08:42.302631Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:42.303352Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] 
[TCleanupTablesActor] ActorId: [1:7593703710005726685:2324], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:42.303428Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703710005726685:2324], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:42.303472Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703710005726685:2324], Successfully finished 2026-01-10T12:08:42.303536Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:42.307429Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YWI1MGIzMTktZWQwNTA5YTUtNzkzOGFlOGEtNmRjNjExMWE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YWI1MGIzMTktZWQwNTA5YTUtNzkzOGFlOGEtNmRjNjExMWE= temp_dir_name# d387b725-4f51-4f60-37f6-91b1a88459e0 trace_id# 2026-01-10T12:08:42.312397Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YWI1MGIzMTktZWQwNTA5YTUtNzkzOGFlOGEtNmRjNjExMWE=, ActorId: [1:7593703710005726703:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:42.328875Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:42.333865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:42.336526Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:08:42.339311Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:42.345459Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:08:42.404228Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:42.466329Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703710005726756:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:42.466492Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703710005726705:2501], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:42.475133Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NjY1ZTA5MzctODE4OGZiNDktYjI5YjhkZTMtMjM3YWQxYTg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NjY1ZTA5MzctODE4OGZiNDktYjI5YjhkZTMtMjM3YWQxYTg= temp_dir_name# 5d29c51d-4018-4c05-832b-39aefbe1c483 trace_id# 2026-01-10T12:08:42.475450Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NjY1ZTA5MzctODE4OGZiNDktYjI5YjhkZTMtMjM3YWQxYTg=, ActorId: [1:7593703710005726763:2328], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:42.475739Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:42.475762Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:08:42.475807Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NjY1ZTA5MzctODE4OGZiNDktYjI5YjhkZTMtMjM3YWQxYTg=, ActorId: [1:7593703710005726763:2328], ActorState: ReadyState, LegacyTraceId: 01kekwy2qb1er0ay1m8rjf4hm3, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7593703710005726762:2538] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-10T12:08:42.475822Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:42.475876Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7593703710005726763:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NjY1ZTA5MzctODE4OGZiNDktYjI5YjhkZTMtMjM3YWQxYTg= 2026-01-10T12:08:42.475887Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703710005726765:2329], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:08:42.475948Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593703710005726766:2330], Database: /Root, Start database fetching 2026-01-10T12:08:42.477505Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593703710005726765:2329], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:08:42.477568Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-10T12:08:42.477582Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:08:42.477835Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7593703710005726776:2331], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:08:42.477883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593703710005726766:2330], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-10T12:08:42.477930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] ... node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, Forwarded TEvStreamData to [10:7593704011799539441:3868] trace_id# 2026-01-10T12:09:52.336050Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:09:52.336292Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, TxInfo status# Committed kind# ReadOnly total_duration# 27.86 server_duration# 27.755 queries_count# 2 trace_id# 2026-01-10T12:09:52.336387Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:09:52.336964Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:52.337010Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, EndCleanup is_final# true trace_id# 2026-01-10T12:09:52.337085Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: ExecuteState, LegacyTraceId: 
01kekx06nrcx4jfhm30hq7tdhz, Sent query response back to proxy proxy_request_id# 5 proxy_id# [12:7593703981040810958:2266] trace_id# 2026-01-10T12:09:52.337109Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: unknown state, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:52.337817Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=YzcyMjY3N2EtOGY0MzJhYTItMWE4MmE3NjEtOWMyY2YyN2U=, ActorId: [12:7593704011105583172:2396], ActorState: unknown state, LegacyTraceId: 01kekx06nrcx4jfhm30hq7tdhz, Session actor destroyed trace_id# 2026-01-10T12:09:52.356538Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY= temp_dir_name# b928290f-4d77-c91b-4698-e19786f3f932 trace_id# 2026-01-10T12:09:52.356736Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:09:52.357359Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ReadyState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpc_actor# [10:7593704011799539451:3872] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-10T12:09:52.357408Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ReadyState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Request placed into pool from cache pool_id# default trace_id# 2026-01-10T12:09:52.357512Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Sending CompileQuery request trace_id# 2026-01-10T12:09:52.558143Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136822977984920 trace_id# 2026-01-10T12:09:52.558248Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:09:52.558473Z node 12 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Created new KQP executer executer_id# [12:7593704011105583202:2406] is_rollback# false trace_id# 2026-01-10T12:09:52.577451Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Forwarded TEvStreamData to [10:7593704011799539451:3872] trace_id# 2026-01-10T12:09:52.578929Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:09:52.579149Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, TxInfo status# Committed kind# ReadOnly total_duration# 21.184 server_duration# 21.054 queries_count# 2 trace_id# 2026-01-10T12:09:52.579246Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:09:52.579838Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:52.579889Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, EndCleanup is_final# true trace_id# 2026-01-10T12:09:52.579971Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: ExecuteState, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Sent query response back to proxy proxy_request_id# 6 proxy_id# [12:7593703981040810958:2266] trace_id# 2026-01-10T12:09:52.580002Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: [12:7593704011105583196:2406], ActorState: unknown state, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:52.580512Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=Yjc4OTAxNDUtOWY3ZGJjNTctNjQ4ZjhmZWUtOWZhZDZjYzY=, ActorId: 
[12:7593704011105583196:2406], ActorState: unknown state, LegacyTraceId: 01kekx06z59gme3xjm5adpq21h, Session actor destroyed trace_id# 2026-01-10T12:09:52.591602Z node 12 :HIVE WARN: tx__status.cpp:58: HIVE#72075186224037888 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-10T12:09:52.594806Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-10T12:09:52.595274Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:09:52.597090Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-10T12:09:52.597430Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:09:52.603107Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=OGMxN2NlNTktMWViMDhiOTUtZTkxNjBiNjctN2QyYTM4MGY=, ActorId: [10:7593703981734767146:2333], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:09:52.603165Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=OGMxN2NlNTktMWViMDhiOTUtZTkxNjBiNjctN2QyYTM4MGY=, ActorId: [10:7593703981734767146:2333], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:09:52.603216Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=OGMxN2NlNTktMWViMDhiOTUtZTkxNjBiNjctN2QyYTM4MGY=, ActorId: [10:7593703981734767146:2333], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:09:52.603258Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=OGMxN2NlNTktMWViMDhiOTUtZTkxNjBiNjctN2QyYTM4MGY=, ActorId: [10:7593703981734767146:2333], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:09:52.603349Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=OGMxN2NlNTktMWViMDhiOTUtZTkxNjBiNjctN2QyYTM4MGY=, ActorId: [10:7593703981734767146:2333], ActorState: unknown state, Session actor destroyed trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:09:52.587692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:09:52.587825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:52.587941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:09:52.587993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:09:52.588041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:09:52.588091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:09:52.588215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:09:52.588294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:09:52.589289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:09:52.589643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:09:52.682789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:09:52.682889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:52.701665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:09:52.701834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:09:52.702074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:09:52.712749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:09:52.713029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:09:52.713872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:09:52.714154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:09:52.716881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:52.717129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:09:52.718530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:09:52.718607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:09:52.718687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:09:52.718758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:09:52.718807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:09:52.719093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:09:52.863905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.864866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:09:52.865639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:09:52.865704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ration: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:55.066725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:55.066780Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:55.066815Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 6 2026-01-10T12:09:55.066849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:09:55.068064Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:55.068160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:09:55.068190Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:09:55.068225Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2026-01-10T12:09:55.068281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:09:55.068354Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:09:55.071972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:09:55.072121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:09:55.072219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:09:55.072560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:09:55.072615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:09:55.073077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:09:55.073178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:09:55.073220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:850:2839] TestWaitNotification: OK eventTxId 105 2026-01-10T12:09:55.073848Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:55.074054Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 274us result status StatusSuccess 2026-01-10T12:09:55.074410Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:55.075022Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:55.075155Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 200us result status StatusSuccess 2026-01-10T12:09:55.075466Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:09:55.075995Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:09:55.076171Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 199us result status StatusSuccess 2026-01-10T12:09:55.076540Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 
\003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2026-01-10T12:09:54.035081Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# A4498A44-B5CA-4FBC-B69C-5DACD64A266D, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:10466 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6B87E9D7-A500-4C17-85DF-130BE19FD610 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-10T12:09:54.045126Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# A4498A44-B5CA-4FBC-B69C-5DACD64A266D, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-10T12:09:54.045523Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 87D0338F-ECE0-4DB5-846C-B08305C822BF, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:10466 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3145DD15-2B0F-446D-B9AB-228FBF61D009 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-10T12:09:54.048683Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 
87D0338F-ECE0-4DB5-846C-B08305C822BF, response# GetObjectResult { } |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |86.3%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:17.892232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:18.014116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:18.021763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:18.022213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:18.022311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:18.492756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:18.626666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:18.626822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:18.666713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:18.793281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:19.570514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:19.571312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.571375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.571401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.571909Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:19.648753Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:20.291749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:24.848200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:24.858058Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:24.862149Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:24.922755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:24.922896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.959377Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:24.968301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.203979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:25.204133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:25.205721Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.206492Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.207220Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.210745Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.210897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.211213Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.211401Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.211596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.211742Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.230162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.505606Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:25.533790Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:25.533908Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:25.575287Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:25.577065Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:25.577348Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:25.577421Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:25.577486Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:25.577559Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:25.577620Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:25.577681Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:25.578356Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:25.578935Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1913:2455] 2026-01-10T12:08:25.584320Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:08:25.594735Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Describe result: PathErrorUnknown 2026-01-10T12:08:25.594818Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Creating table 2026-01-10T12:08:25.594911Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:08:25.612825Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2138:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:25.622702Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2120:2578] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-10T12:08:25.626803Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-10T12:08:25.631325Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:25.631445Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2148:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:25.681645Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2221:2651] 2026-01-10T12:08:25.681960Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2221:2651], schemeshard id = 72075186224037897 2026-01-10T12:08:25.825760Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:26.059402Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:08:26.374687Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:08:26.377501Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2564:2879], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:26.392167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:26.398373Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:26.398515Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Subscribe on create table tx: 2814749767 ... yte size: 25, entries count: 1, are all stats full: 0 2026-01-10T12:09:34.716630Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:36.021491Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 52 ], ReplyToActorId[ [2:6313:5058]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:36.022022Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 52 ] 2026-01-10T12:09:36.022075Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 52, ReplyToActorId = [2:6313:5058], StatRequests.size() = 1 2026-01-10T12:09:36.414044Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:36.414124Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:36.414426Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:09:36.434362Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:37.356329Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 53 ], ReplyToActorId[ [2:6349:5077]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:37.356799Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 53 ] 2026-01-10T12:09:37.356856Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 53, ReplyToActorId = [2:6349:5077], StatRequests.size() = 1 2026-01-10T12:09:38.484884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:38.700775Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 54 ], ReplyToActorId[ [2:6387:5095]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:38.701072Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 54 ] 2026-01-10T12:09:38.701110Z 
node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 54, ReplyToActorId = [2:6387:5095], StatRequests.size() = 1 2026-01-10T12:09:39.935667Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 55 ], ReplyToActorId[ [2:6421:5110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:39.936109Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 55 ] 2026-01-10T12:09:39.936161Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 55, ReplyToActorId = [2:6421:5110], StatRequests.size() = 1 2026-01-10T12:09:40.641020Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:09:40.641091Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:40.641124Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:09:40.641152Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:09:41.930411Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:6464:5130]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:41.930947Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-10T12:09:41.931001Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:6464:5130], StatRequests.size() = 1 2026-01-10T12:09:42.552089Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:09:42.552389Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-10T12:09:42.552711Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-10T12:09:43.112170Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-10T12:09:43.112261Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.249000s, at schemeshard: 72075186224037899 2026-01-10T12:09:43.112658Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-10T12:09:43.128878Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:43.715574Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:6504:5150]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:43.715991Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-10T12:09:43.716043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:6504:5150], StatRequests.size() = 1 2026-01-10T12:09:44.637040Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, 
at schemeshard: 72075186224037897 2026-01-10T12:09:44.637122Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:44.637515Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:09:44.652729Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:46.658002Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:6542:5171]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:46.658498Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-10T12:09:46.658555Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:6542:5171], StatRequests.size() = 1 2026-01-10T12:09:48.757549Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:6574:5186]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:48.757890Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-10T12:09:48.757934Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:6574:5186], StatRequests.size() = 1 2026-01-10T12:09:50.314193Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:09:50.598259Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:6620:5212]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:50.598563Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-10T12:09:50.598608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:6620:5212], StatRequests.size() = 1 2026-01-10T12:09:52.155738Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:6658:5231]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:52.156191Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-10T12:09:52.156246Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:6658:5231], StatRequests.size() = 1 2026-01-10T12:09:53.460897Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:09:53.460982Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.411000s, at schemeshard: 72075186224037899 2026-01-10T12:09:53.461250Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 27, entries count: 1, are all stats full: 1 2026-01-10T12:09:53.479449Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:53.599453Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:6694:5248]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-10T12:09:53.599848Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-10T12:09:53.599899Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:6694:5248], StatRequests.size() = 1 2026-01-10T12:09:53.999027Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:09:53.999381Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-10T12:09:53.999543Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-10T12:09:54.080395Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:09:54.080477Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:54.080749Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:09:54.101834Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:55.101371Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:6736:5272]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:55.101858Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2026-01-10T12:09:55.101922Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:6736:5272], StatRequests.size() = 1 2026-01-10T12:09:55.103060Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:6739:5275]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:55.103395Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-10T12:09:55.103478Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:6739:5275], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11296 }' |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests |86.3%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2026-01-10T12:09:14.487647Z :PropagateSessionClosed INFO: Random seed for debugging is 1768046954487607 2026-01-10T12:09:15.178890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703853049907127:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:15.178947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:15.380746Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.405211Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703852784529621:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:15.405298Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:15.517339Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.811518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.019960Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.117623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.196219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:16.208211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.262043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.262134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.281121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.281211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.443875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.464936Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.489978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.490241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:16.605115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:16.605971Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:16.611525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.716727Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:16.900528Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.023195s 2026-01-10T12:09:17.323835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0022a1/r3tmp/yandexZamDhE.tmp 2026-01-10T12:09:17.323862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0022a1/r3tmp/yandexZamDhE.tmp 2026-01-10T12:09:17.324036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0022a1/r3tmp/yandexZamDhE.tmp 2026-01-10T12:09:17.324172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:17.418608Z INFO: TTestServer started on Port 9302 GrpcPort 32099 PQClient connected to localhost:32099 2026-01-10T12:09:17.902081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:20.184194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703853049907127:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:20.184293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:20.396432Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703852784529621:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:20.396535Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:22.005872Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703878554333723:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.006053Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.006493Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882849301039:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.011326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882849301061:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.011415Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.011735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703882849301066:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.011784Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.017731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:22.077660Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703882849301041:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:22.238873Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703882849301073:2144] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:22.543969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:22.553220Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703882849301080:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:22.557307Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703883114679502:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:22.560026Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTk3MTFkMTAtNjcxYWIxNWYtNDk3YzgzYWQtMTMwOGYyZjM=, ActorId: [1:7593703883114679475:2333], ActorState: ExecuteState, LegacyTraceId: 01kekwz9gb1bb7d382hhkgmn89, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and u ... : [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7593704017977449428:2524] disconnected; active server actors: 1 2026-01-10T12:09:55.284368Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7593704017977449428:2524] client user disconnected session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.284455Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704017977449429:2524] disconnected. 2026-01-10T12:09:55.284743Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [3:7593704022272416869:2541] destroyed 2026-01-10T12:09:55.284473Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704017977449429:2524] disconnected; active server actors: 1 2026-01-10T12:09:55.284494Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704017977449429:2524] client user disconnected session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.284785Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.284809Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [3:7593704017977449442:2530] destroyed 2026-01-10T12:09:55.284533Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7593704017977449430:2524] disconnected. 2026-01-10T12:09:55.284843Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:09:55.284548Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7593704017977449430:2524] disconnected; active server actors: 1 2026-01-10T12:09:55.284871Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.284564Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7593704017977449430:2524] client user disconnected session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.284886Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.284899Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.284918Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.284930Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.284969Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.284986Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [3:7593704017977449444:2532] destroyed 2026-01-10T12:09:55.285006Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.285026Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7593704017977449443:2531] destroyed 2026-01-10T12:09:55.285077Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.285094Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.285106Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_17808203987261500938_v1 2026-01-10T12:09:55.308208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.308277Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.308306Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.308330Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.308342Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.312675Z :INFO: [/Root] [/Root] [37746e15-1362479e-fcccb8cc-e5eda1b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 464 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:09:55.312805Z :NOTICE: [/Root] [/Root] 
[37746e15-1362479e-fcccb8cc-e5eda1b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:09:55.356208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.356208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.356245Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.356246Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.356260Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.356260Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.356282Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.356282Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.356296Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.356296Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.409963Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.410004Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.410024Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.410054Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.410070Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.458747Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.458784Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.458799Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.458821Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.458834Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.458898Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.458909Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.458919Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:09:55.458931Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.458941Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.510053Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.510097Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.510119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.510141Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.510155Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.559307Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.559345Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.559358Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.559380Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.559394Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:09:55.559511Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:09:55.559529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.559543Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:09:55.559559Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:09:55.559569Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:09:56.006179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:09:56.006205Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> TCmsTest::WalleRebootDownNode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 |86.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks |86.3%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |86.3%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> TCmsTest::StateRequest |86.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TCmsTest::StateStorageTwoRings >> TCmsTest::StateRequestUnknownNode >> TCmsTest::ManualRequestApproval >> TCmsTest::DynamicConfig |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> TestSqsTopicHttpProxy::TestDeleteMessage [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/tests/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |86.3%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/mixedpy/tests/py3test >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestRestartServices >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> TestSqsTopicHttpProxy::TestDeleteMessageBatch |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> TCmsTest::WalleRebootDownNode [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> TCmsTest::PermissionDuration |86.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::WalleCleanupTest |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> DataShardVolatile::DistributedWriteThenSplit >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> TCmsTest::ManageRequests [GOOD] |86.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 |86.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.3%| [TM] {RESULT} ydb/tests/stress/mixedpy/tests/py3test >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks [GOOD] >> TCmsTest::RequestReplaceBrokenDevices >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestProcessingQueue |86.3%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageTwoBrokenRings |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |86.3%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::LastRefreshTime >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTenatsTest::TestTenantLimit >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::CreateTime >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::ManualRequestApproval [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> IncrementalBackup::OmitIndexesIncrementalBackup [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::TestProcessingQueue [GOOD] |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2026-01-10T12:08:45.305088Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703723085605589:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:45.305153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:45.800106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:45.854111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:45.854225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:45.965042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:46.081298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:46.112005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:46.384344Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:46.490037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:46.490066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:46.490074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:46.490198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:47.089359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:49.934590Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:49.938710Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703740265476057:2328], Start check tables existence, number paths: 2 2026-01-10T12:08:49.939521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:49.939557Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:49.961397Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:49.961521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703740265476057:2328], Describe table 
/Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:49.961587Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703740265476057:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:49.961620Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703740265476057:2328], Successfully finished 2026-01-10T12:08:49.961738Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:49.964802Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=OTFhYjEwZDQtMzg1YTE0MzItM2Y2ZjNiZWMtOTdhNmZjZTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# OTFhYjEwZDQtMzg1YTE0MzItM2Y2ZjNiZWMtOTdhNmZjZTE= temp_dir_name# 87f8dfe0-4897-3dd0-374e-91b2a5041b42 trace_id# 2026-01-10T12:08:49.968211Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=OTFhYjEwZDQtMzg1YTE0MzItM2Y2ZjNiZWMtOTdhNmZjZTE=, ActorId: [1:7593703740265476087:2332], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:50.072935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:50.126767Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703743523924834:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:50.127098Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:50.194256Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:50.246339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:50.246434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:50.256896Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:08:50.257922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:50.280176Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-10T12:08:50.306441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703723085605589:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:50.306513Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:50.401245Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:50.401329Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:50.404493Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.404698Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.404795Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.404861Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.404907Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.404985Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.405091Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.405206Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.405276Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:50.432990Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:50.767283Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2026-01-10T12:08:50.767785Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:51.141165Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:51.173382Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:51.204148Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:51.204167Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:51.204300Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:51.292233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:51.342946Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703749490661388:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:51.343018Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:51.448305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:51.448380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:51.452667Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-10T12:08:51.452953Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:51.478171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:51.574858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:51.574973Z node 2 :HIVE WARN: node_info.cpp:25: ... # 2026-01-10T12:10:02.446502Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025017:2454], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.446627Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.448912Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: ExecuteState, LegacyTraceId: 01kekx0gshd15avzfgpqyeyw5n, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [10:7593704052542024969:2327] workload_service_cleanup# false trace_id# 2026-01-10T12:10:02.452051Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: CleanupState, LegacyTraceId: 01kekx0gshd15avzfgpqyeyw5n, EndCleanup is_final# false trace_id# 2026-01-10T12:10:02.452205Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: CleanupState, LegacyTraceId: 01kekx0gshd15avzfgpqyeyw5n, Sent query response back to proxy proxy_request_id# 17 proxy_id# [10:7593704005297383395:2152] trace_id# 2026-01-10T12:10:02.459526Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U= temp_dir_name# d0680943-4ac2-5ee9-0ee7-4793adbe5a69 trace_id# 2026-01-10T12:10:02.460058Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:10:02.460182Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-10T12:10:02.460216Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-10T12:10:02.460250Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025030:2456], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-10T12:10:02.460729Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025030:2456], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.460826Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.460888Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-10T12:10:02.460918Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025032:2457], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-10T12:10:02.461341Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025032:2457], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.461394Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.464323Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: ReadyState, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Received request proxy_request_id# 19 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [10:7593704052542025027:2807] database# Root database_id# /Root pool_id# my_pool trace_id# 2026-01-10T12:10:02.464419Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [10:7593704052542025028:2455], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U= 2026-01-10T12:10:02.464493Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [10:7593704052542025034:2458], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, Start pool fetching 2026-01-10T12:10:02.464530Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025035:2459], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-10T12:10:02.464819Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704052542025035:2459], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.464933Z node 10 :KQP_WORKLOAD_SERVICE ERROR: scheme_actors.cpp:56: [WorkloadService] [TPoolResolverActor] ActorId: [10:7593704052542025034:2458], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-10T12:10:02.465071Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:114: [WorkloadService] [TPoolResolverActor] ActorId: [10:7593704052542025034:2458], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-10T12:10:02.465193Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:553: [WorkloadService] [Service] Reply continue error NOT_FOUND to [10:7593704052542025028:2455]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-10T12:10:02.465459Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: ExecuteState, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool status# NOT_FOUND issues# { message: "Failed to resolve pool id my_pool" severity: 1 issues { message: "Resource pool my_pool not found or you don\'t have access permissions" severity: 1 } } trace_id# 2026-01-10T12:10:02.465612Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: ExecuteState, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-10T12:10:02.465756Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [10:7593704052542025028:2455], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U= 2026-01-10T12:10:02.465822Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: CleanupState, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, EndCleanup is_final# true trace_id# 2026-01-10T12:10:02.465946Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: CleanupState, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Sent query response back to proxy proxy_request_id# 19 proxy_id# [10:7593704005297383395:2152] trace_id# 2026-01-10T12:10:02.465988Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: unknown state, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:02.466127Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=Nzc1NDgyNjktZjlmZTE4ZjctYjQ4NjUwZmEtMWYwNDNkY2U=, ActorId: [10:7593704052542025028:2455], ActorState: unknown state, LegacyTraceId: 01kekx0gv026h3hxdahdrjrzak, Session actor destroyed trace_id# 2026-01-10T12:10:02.489402Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:02.489473Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:02.489517Z node 10 :KQP_SESSION 
DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:02.489558Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:02.489659Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=OTAzZTg1NWItYjZhODY5NjEtMzlmNzdkNGMtNWU1NjMzMDQ=, ActorId: [10:7593704031067187949:2327], ActorState: unknown state, Session actor destroyed trace_id# |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2026-01-10T12:09:56.122053Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-10T12:09:56.254544Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-10T12:09:56.286131Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-10T12:09:56.407406Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-10T12:10:03.618890Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 26:26 2026-01-10T12:10:03.618962Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 27:27 2026-01-10T12:10:03.618987Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 28:28 2026-01-10T12:10:03.619010Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 29:29 2026-01-10T12:10:03.619033Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 30:30 2026-01-10T12:10:03.619053Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 31:31 2026-01-10T12:10:03.619073Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 32:32 2026-01-10T12:10:03.619111Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 25:25 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceBrokenDevices [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> IncrementalBackup::BasicIndexIncrementalRestore >> TCmsTenatsTest::RequestRestartServices [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::DisableMaintenance >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> TCmsTest::SamePriorityRequest2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> TCmsTest::ManualRequestApprovalAlreadyLockedNode [GOOD] >> TCmsTest::ManageRequestsWrong |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::RacyStartCollecting [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::WalleDisableMaintenance >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::ActionWithZeroDuration >> TCmsTest::WalleTasks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTenatsTest::TestNoneTenantPolicy >> TestSqsTopicHttpProxy::TestDeleteMessageBatch [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::PriorityLocks |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |86.4%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |86.4%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console >> TCmsTest::TestKeepAvailableModeScheduled |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console >> 
KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> DataShardVolatile::DistributedWriteThenReadIterator >> TCmsTest::RequestReplaceDevices >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-10T12:09:54.277379Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704020719027703:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:54.277586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:54.559465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:54.574081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:54.574181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:54.711005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:54.770828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:54.771664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704020719027593:2081] 1768046994270276 != 1768046994270279 2026-01-10T12:09:54.850642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:54.919281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:54.919304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:54.919309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:54.919390Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:55.150703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:55.285860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:55.385908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:09:55.393353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:55.415536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:09:55.506602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.661166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.714304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:09:55.718540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.775717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-10T12:09:55.783815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.829478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.871063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.904966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.939357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.974663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:56.008382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:57.984886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704033603931148:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:57.984885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704033603931136:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:57.984991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:57.985308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704033603931151:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:57.985395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:57.989700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:58.006060Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704033603931150:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:09:58.083487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704037898898499:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:58.592993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:58.666899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:58.731513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:58.779593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... s before, last time 2026-01-10T12:10:08.000000Z 2026-01-10T12:10:08.797004Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2026-01-10T12:10:08.797005Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:53302) <- (200 , 436 bytes) 2026-01-10T12:10:08.797022Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1768047008791 queuesize 0 startOffset 0 2026-01-10T12:10:08.797052Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:10:08.797102Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2026-01-10T12:10:08.797131Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:53302) connection closed Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"SequenceNumber":"0","Id":"Id-2","MD5OfMessageBody":"7034dd2039d12b6dd94a9e6dfb820b77","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"SequenceNumber":"0","Id":"Id-3","MD5OfMessageBody":"f23251df60f088df56a4be0a5fb1ae75","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-10T12:10:08.797172Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:10:08.797204Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-10T12:10:08.798145Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:53304) incoming connection opened 2026-01-10T12:10:08.798213Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:53304) -> (POST /Root, 101 bytes) 2026-01-10T12:10:08.798356Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58c1:c493:a97b:0:40c1:c493:a97b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: cda6aef3-5046eb24-1ddacc3f-dfe26943 2026-01-10T12:10:08.798715Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [cda6aef3-5046eb24-1ddacc3f-dfe26943] got new request from [58c1:c493:a97b:0:40c1:c493:a97b:0] database '/Root' stream '' 2026-01-10T12:10:08.799131Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [cda6aef3-5046eb24-1ddacc3f-dfe26943] [auth] Authorized successfully 2026-01-10T12:10:08.799175Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [cda6aef3-5046eb24-1ddacc3f-dfe26943] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:10:08.800018Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1768047013799 VisibilityDeadlineMilliseconds: 1768047038799 MaxNumberOfMessages: 10 2026-01-10T12:10:08.801077Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-10T12:10:08.801101Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-10T12:10:08.801174Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 3 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2026-01-10T12:10:08.801275Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 195 count 3 last offset 0, current partition end offset: 3 2026-01-10T12:10:08.801283Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-10T12:10:08.801321Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 195 accessed 2 times before, last time 2026-01-10T12:10:08.000000Z 2026-01-10T12:10:08.801345Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-10T12:10:08.801371Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:10:08.801444Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2026-01-10T12:10:08.801526Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:10:08.802210Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SentTimestamp":"1768047008791","MessageGroupId":"MessageGroupId-1"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-1","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"MD5OfBody":"7034dd2039d12b6dd94a9e6dfb820b77","Attributes":{"SentTimestamp":"1768047008792"},"ReceiptHandle":"CAAQAQ==","Body":"MessageBody-2","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"MD5OfBody":"f23251df60f088df56a4be0a5fb1ae75","Attributes":{"SentTimestamp":"1768047008792"},"ReceiptHandle":"CAAQAg==","Body":"MessageBody-3","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-10T12:10:08.802519Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [cda6aef3-5046eb24-1ddacc3f-dfe26943] reply ok 2026-01-10T12:10:08.802745Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:53304) <- (200 , 641 bytes) 2026-01-10T12:10:08.802845Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:53304) connection closed 2026-01-10T12:10:08.804246Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:53310) incoming connection opened 2026-01-10T12:10:08.804317Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:53310) -> (POST /Root, 397 bytes) 2026-01-10T12:10:08.804453Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58d9:c493:a97b:0:40d9:c493:a97b:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 91dbf334-a54aeab0-84ad9568-78d3f871 2026-01-10T12:10:08.804976Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessageBatch] requestId [91dbf334-a54aeab0-84ad9568-78d3f871] got new request from [58d9:c493:a97b:0:40d9:c493:a97b:0] database '/Root' stream '' 2026-01-10T12:10:08.805502Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessageBatch] requestId [91dbf334-a54aeab0-84ad9568-78d3f871] [auth] Authorized successfully 2026-01-10T12:10:08.805544Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessageBatch] requestId [91dbf334-a54aeab0-84ad9568-78d3f871] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:10:08.806706Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 2 Offset: 0 Offset: 1 2026-01-10T12:10:08.807831Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:08.807852Z node 2 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:10:08.807876Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:10:08.807886Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:08.807902Z node 2 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:10:08.807947Z node 2 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 3 
(startOffset 0) session 2026-01-10T12:10:08.807961Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:10:08.807972Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:10:08.807984Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:10:08.808138Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV Http output full {"Successful":[{"Id":"delete-id-2"},{"Id":"delete-id-0"},{"Id":"delete-id-1"}],"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"delete-invalid","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2026-01-10T12:10:08.809729Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [91dbf334-a54aeab0-84ad9568-78d3f871] reply ok 2026-01-10T12:10:08.810032Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:53310) <- (200 , 219 bytes) 2026-01-10T12:10:08.810138Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:53310) connection closed 2026-01-10T12:10:08.810619Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:10:08.810670Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:10:08.810706Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:10:08.810731Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:08.810744Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:08.810755Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:08.810770Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:08.810780Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:10:08.810806Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:10:08.849815Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:08.849855Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:08.849867Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:08.849886Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:08.849896Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |86.4%| 
[TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TMaintenanceApiTest::DisableMaintenance [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> TCmsTest::PriorityLocks [GOOD] >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::DisableMaintenance [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> TCmsTest::WalleDisableMaintenance [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityLocks [GOOD] Test command err: 2026-01-10T12:10:05.831442Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-10T12:10:05.831517Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-10T12:10:05.831564Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-10T12:10:05.831590Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-10T12:10:05.831618Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-10T12:10:05.831644Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-10T12:10:05.831670Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-10T12:10:05.831695Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-10T12:10:05.852004Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-10T12:10:05.852225Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 
2026-01-10T12:10:05.852262Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-10T12:10:05.852288Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-10T12:10:05.852314Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-10T12:10:05.852340Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-10T12:10:05.852366Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-10T12:10:05.852391Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-10T12:10:05.989802Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-10T12:10:05.989878Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-10T12:10:05.989906Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-10T12:10:05.989931Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-10T12:10:05.989955Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-10T12:10:05.989997Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-10T12:10:05.990022Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-10T12:10:05.990044Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ManageRequestsDry [GOOD] Test command err: 2026-01-10T12:10:02.303755Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-10T12:10:02.304751Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-10T12:10:02.304803Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2026-01-10T12:10:02.705280Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-10T12:10:02.706173Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-10T12:10:02.706245Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableMaintenance [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> 
TMaintenanceApiTest::ForceAvailabilityMode [GOOD] Test command err: 2026-01-10T12:10:08.502480Z node 25 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006399s |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableMode >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::SysTabletsNode [GOOD] >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TxOrderInternals::OperationOrder [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> BasicUsage::PreferredDatabaseNoFallback [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] >> Initializer::Simple [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2026-01-10T12:08:20.888117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703617472687368:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:20.888191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:21.277905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:21.283409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:21.283512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:21.338458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:21.428717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.564755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:21.694741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:21.694769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:21.694776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:21.694875Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:21.916308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:22.094031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:25.402514Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:25.404161Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703638947524667:2320], Start check tables existence, number paths: 2 2026-01-10T12:08:25.407181Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:25.407206Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:25.417922Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703638947524667:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:25.417994Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703638947524667:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:25.418040Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703638947524667:2320], Successfully finished 2026-01-10T12:08:25.418159Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:25.418195Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:25.419384Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz temp_dir_name# a3d53a41-4784-8a65-9b84-f3ae1d0347a4 trace_id# 2026-01-10T12:08:25.419729Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [1:7593703638947524696:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:25.483082Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:25.487261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:25.490151Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:08:25.491811Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:25.502958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:08:25.560930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:25.630245Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703638947524749:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:25.630407Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703638947524698:2502], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:25.640245Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:25.640298Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:08:25.640374Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703638947524758:2328], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:08:25.640442Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:25.640633Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [1:7593703638947524696:2326], ActorState: ReadyState, LegacyTraceId: 01kekwxj97apr996jess1tvce3, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-10T12:08:25.643393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703638947524758:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:25.643515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:25.643659Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:25.643700Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703638947524767:2329], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:08:25.647561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703638947524767:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:25.647660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:25.856236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:08:25.869285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:08:25.871811Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [1:7593703638947524696:2326], ActorState: ExecuteState, LegacyTraceId: 01kekwxj97apr996jess1tvce3, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [1:7593703638947524769:2326] workload_service_cleanup# false trace_id# 2026-01-10T12:08:25.874305Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [1:7593703638947524696:2326], ActorState: CleanupState, LegacyTraceId: 01kekwxj97apr996jess1tvce3, EndCleanup is_final# false trace_id# 2026-01-10T12:08:25.874386Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=1&id=NjlmNjU5NTgtYTNiNzdmNC0xMjc4MjUzYy01NTgxOTEz, ActorId: [1:7593703638947524696:2326], Act ... ack to proxy proxy_request_id# 94 proxy_id# [8:7593704001939474540:2266] trace_id# 2026-01-10T12:10:11.658444Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Database coordinators are unavailable" severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kekx0sjmas0jk1vd14xrj1va" } } } } ; 2026-01-10T12:10:11.658815Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Database coordinators are unavailable 2026-01-10T12:10:11.658969Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZmVhOGVhNDQtZDU5ZjY4YTUtMjAzOGI2NGMtN2I5ZmNiNGM=, ActorId: [8:7593704092133791164:2942], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:11.659017Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZmVhOGVhNDQtZDU5ZjY4YTUtMjAzOGI2NGMtN2I5ZmNiNGM=, ActorId: [8:7593704092133791164:2942], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:11.659057Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZmVhOGVhNDQtZDU5ZjY4YTUtMjAzOGI2NGMtN2I5ZmNiNGM=, ActorId: [8:7593704092133791164:2942], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:11.659091Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZmVhOGVhNDQtZDU5ZjY4YTUtMjAzOGI2NGMtN2I5ZmNiNGM=, ActorId: [8:7593704092133791164:2942], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:11.659178Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZmVhOGVhNDQtZDU5ZjY4YTUtMjAzOGI2NGMtN2I5ZmNiNGM=, ActorId: [8:7593704092133791164:2942], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:10:11.760812Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM= temp_dir_name# c6ffde44-4fd4-96fd-cd1a-65ab38fbb98f trace_id# 2026-01-10T12:10:11.761402Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:10:11.761912Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ReadyState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, Received request proxy_request_id# 98 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpc_actor# [8:7593704092133791227:2957] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-10T12:10:11.761961Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ReadyState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, Request placed into pool from cache pool_id# default trace_id# 2026-01-10T12:10:11.762113Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ExecuteState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, 
Sending CompileQuery request trace_id# 2026-01-10T12:10:11.806513Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7593704023414311858:2664][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 48 2026-01-10T12:10:11.806656Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7593704023414311858:2664][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 49 2026-01-10T12:10:11.808139Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7593704092133791229:2958], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-10T12:10:11.817951Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ExecuteState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:10:11.818011Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ExecuteState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:11.818039Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ExecuteState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, EndCleanup is_final# false trace_id# 2026-01-10T12:10:11.818215Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ExecuteState, LegacyTraceId: 01kekx0sxhbm9gq6h3qwb4zjdw, Sent query response back to proxy proxy_request_id# 98 proxy_id# [8:7593704001939474540:2266] trace_id# 2026-01-10T12:10:11.819032Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2026-01-10T12:10:11.819385Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-10T12:10:11.819547Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:11.819596Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:11.819655Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:11.819692Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:11.819793Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NWFjYzhhNDctNmMzNTQyYTEtZGVkMzM0ZjEtYTE4OTQ5YzM=, ActorId: [8:7593704092133791226:2956], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:10:12.033360Z node 8 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [8:7593704001939474360:2108], processor id# 72075186224037891, database# /Root/test-dedicated 2026-01-10T12:10:12.406653Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU= temp_dir_name# c26ee5a2-4200-c51d-a059-fa928e8f9002 trace_id# 2026-01-10T12:10:12.412237Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU=, ActorId: [8:7593704096428758556:2962], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:10:12.412984Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU=, ActorId: [8:7593704096428758556:2962], ActorState: ReadyState, LegacyTraceId: 01kekx0thw2s9ahk916g01jxxs, Received request proxy_request_id# 100 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/test-dedicated/.metadata/initialization/migrations`; rpc_actor# [8:7593704096428758557:2963] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-10T12:10:12.413048Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=8&id=ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU=, ActorId: [8:7593704096428758556:2962], ActorState: ReadyState, LegacyTraceId: 01kekx0thw2s9ahk916g01jxxs, Request placed into pool from cache pool_id# default trace_id# 2026-01-10T12:10:12.413207Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: 
ydb://session/3?node_id=8&id=ZWM5ZWQ2NzktMjE1N2RmYTAtZWI1OGI0NTMtZTY5NzQyYWU=, ActorId: [8:7593704096428758556:2962], ActorState: ExecuteState, LegacyTraceId: 01kekx0thw2s9ahk916g01jxxs, Sending CompileQuery request trace_id# |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:15.082302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:15.082396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:15.084930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:15.085050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:15.085103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:15.085149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:15.085290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:15.085386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:15.086341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:15.086737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:15.186508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-10T12:10:15.186576Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:15.217904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:15.223280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:15.223493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:15.231880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:15.232147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:15.232950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:15.233374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:15.236827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:15.237020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:15.238341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:15.238400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:15.238516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:15.238571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:15.238611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:15.238833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:15.418290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.419991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.420056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.420882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.420959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.421049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.421156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:15.421231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ished: true CreateTxId: 281474976710675 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000018 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710692 CreateStep: 5000016 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 25 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710680 CreateStep: 5000030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:16.243977Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:679:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:10:16.246159Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-10T12:10:16.246980Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:10:16.247205Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 232us result status StatusSuccess 2026-01-10T12:10:16.247563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:16.248050Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:10:16.248575Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 218us result status StatusSuccess 2026-01-10T12:10:16.248896Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:16.249381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:10:16.249564Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 189us result status StatusSuccess 2026-01-10T12:10:16.249876Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 34 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710689 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 34 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:06.223318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:06.353447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:06.363820Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:06.364436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:06.364562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:06.788577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:06.788744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:06.796178Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046942699217 != 1768046942699220 2026-01-10T12:09:06.810450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:06.859850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:06.951146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:07.495489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:07.496136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:07.496178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:07.496203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:07.496633Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:07.510576Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:17.687012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.687178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:909:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.687828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.689205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:914:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.689723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:17.694919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:17.784274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:912:2772], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:09:17.894701Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:969:2810] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:18.213236Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:979:2819], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:18.215874Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OGE4ODBjM2QtN2RjMDg3ZTQtZGRmODNjMjctNDgwYzdmMmQ=, ActorId: [1:894:2761], ActorState: ExecuteState, LegacyTraceId: 01kekwz53j59098zjcxyht2zex, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2026-01-10T12:09:18.275741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:19.463631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:19.962114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:21.184742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2026-01-10T12:09:43.192055Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1517:3220] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2026-01-10T12:09:43.192329Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1517:3220] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2026-01-10T12:10:15.388184Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1678:3339] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2026-01-10T12:10:15.388423Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1678:3339] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::BridgeModeCollectInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2026-01-10T12:09:14.428177Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1768046954428137 2026-01-10T12:09:14.971154Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703847731645881:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:14.971373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:15.005776Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.007827s 2026-01-10T12:09:15.077215Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.137671Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.466125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:15.466265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:15.472192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:15.548639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:15.548746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:15.552987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:15.553062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:15.629454Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:15.631799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:15.632212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:15.756058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-10T12:09:15.766653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:15.780276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:16.028307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.132530Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.133258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0022a0/r3tmp/yandexoKVzI6.tmp 2026-01-10T12:09:16.133289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0022a0/r3tmp/yandexoKVzI6.tmp 2026-01-10T12:09:16.133453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0022a0/r3tmp/yandexoKVzI6.tmp 2026-01-10T12:09:16.133516Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:16.191607Z INFO: TTestServer started on Port 64287 GrpcPort 18493 PQClient connected to localhost:18493 2026-01-10T12:09:16.865587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:19.947838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703847731645881:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:19.947960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:20.177953Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703874635831595:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:20.178084Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703874635831603:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:20.178154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:20.181033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703874635831610:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:20.181098Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:20.191926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:20.223031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-10T12:09:20.224618Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703874635831609:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:20.336234Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703874635831639:2142] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:20.721795Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703873501451072:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:20.724241Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NWVkYzY0YTctMjViYjJmMWYtZmVkZDYzZmMtNzlmZThkOTk=, ActorId: [1:7593703873501451046:2331], ActorState: ExecuteState, LegacyTraceId: 01kekwz7nv564s7t0m3ybq19cv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:20.727835Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703874635831646:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:20.730159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:20.728451Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NTU1NjVlMzEtNjljMTI3N2QtYThlZjBlMTYtZDIyZTAyOGU=, ActorId: [2:7593703874635831593:2302], ActorState: ExecuteState, LegacyTraceId: 01kekwz7hbceync5va191n4zzq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:20.732139Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:09:20.732352Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotati ... 
-01-10T12:10:14.047485Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.047529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.047547Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.047569Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.047584Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.152259Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.152304Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.152324Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.152351Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.152367Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.256236Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.256273Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.256289Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.256313Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.256328Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.356256Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.356294Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.356310Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.356332Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.356345Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.388606Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:14771" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:14771" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" 
id: "account-dc3" endpoint: "localhost:14771" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } === Closing the session 2026-01-10T12:10:14.414382Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2026-01-10T12:10:14.416312Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2026-01-10T12:10:14.423391Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2026-01-10T12:10:14.423536Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2026-01-10T12:10:14.423862Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2026-01-10T12:10:14.423912Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2026-01-10T12:10:14.423977Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2026-01-10T12:10:14.424154Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2026-01-10T12:10:14.424413Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-10T12:10:14.424457Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-10T12:10:14.430421Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2026-01-10T12:10:14.435814Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { path: "test-topic" message_group_id: "src_id" } 2026-01-10T12:10:14.436059Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 path: "test-topic" message_group_id: "src_id" from ipv6:[::1]:52610 2026-01-10T12:10:14.436091Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="topic server" ip=ipv6:[::1]:52610 proto=topic topic=test-topic durationSec=0 2026-01-10T12:10:14.436101Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:10:14.441870Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-10T12:10:14.442022Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2026-01-10T12:10:14.442033Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:10:14.442045Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS 
Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2026-01-10T12:10:14.442065Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7593704105147561760:2683] (SourceId=src_id, PreferedPartition=(NULL)) StartKqpSession 2026-01-10T12:10:14.449157Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7593704105147561760:2683] (SourceId=src_id, PreferedPartition=(NULL)) Select from the table 2026-01-10T12:10:14.452640Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: grpc closed 2026-01-10T12:10:14.452677Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: is DEAD 2026-01-10T12:10:14.454176Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YTYxYWM3NTktYjc4ZWE0NDktYTQ0Nzk3ODAtYWU2MTNhZTQ=, ActorId: [3:7593704105147561761:2685], ActorState: ExecuteState, LegacyTraceId: 01kekx0whhd5gmmjb6b9gaqckm, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Query execution is cancelled because session was requested to be closed." severity: 1 }{ message: "Cancelling after 4ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:10:14.459140Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.459179Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.459196Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.459216Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.459230Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.560352Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.560390Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.560417Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:14.560437Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.560451Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:14.662535Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:14.662573Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.662591Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 
2026-01-10T12:10:14.662615Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:14.662630Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:15.015811Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [3:7593704109442529091:2673] TxId: 281474976720696. Ctx: { TraceId: 01kekx0w5w461199wf06bz5bac, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZjY5NDY4MS1jNzZhZjk3Yi1hNzM1Mjg3NS1iNTRiNTkxZQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-10T12:10:15.016696Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593704109442529095:2673], TxId: 281474976720696, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx0w5w461199wf06bz5bac. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjY5NDY4MS1jNzZhZjk3Yi1hNzM1Mjg3NS1iNTRiNTkxZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7593704109442529091:2673], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |86.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TConsoleTests::TestRestartConsoleAndPools >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> TConsoleConfigTests::TestModifyConfigItem >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TConsoleTests::TestCreateSharedTenant |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |86.4%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |86.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionIssuePartialPermissions |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] |86.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/cms/console/ut_console/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCmsTest::BridgeModeCollectInfo [GOOD] >> TCmsTest::BridgeModeGroups >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::DisableMaintenance >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] Test command err: 2026-01-10T12:10:17.872259Z node 30 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012089s 2026-01-10T12:10:21.275365Z node 38 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007249s |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] Test command err: 2026-01-10T12:10:19.122490Z node 18 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005381s 2026-01-10T12:10:19.157356Z node 28 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005309s |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] Test command err: 2026-01-10T12:10:16.959333Z node 17 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005094s 2026-01-10T12:10:17.020492Z node 21 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007052s 2026-01-10T12:10:17.052761Z node 22 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008641s |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> BasicUsage::SimpleHandlers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] Test command err: 2026-01-10T12:10:18.997573Z 
node 34 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005434s |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] Test command err: 2026-01-10T12:10:14.894121Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006004s 2026-01-10T12:10:18.100360Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008683s >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2026-01-10T12:08:22.115125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703624198877197:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:22.115187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:22.186928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:22.621599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:22.621722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:22.662386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:22.683395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:22.802162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:22.946383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:23.041384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:23.041407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:23.041428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:23.041540Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:23.110073Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.497506Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:23.507378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:08:26.884401Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:26.884508Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703641378747005:2322], Start check tables existence, number paths: 2 2026-01-10T12:08:26.893044Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NzQyZDkwZjQtMWUwZTQwYTctYjI2M2UyZmMtYmU2MjcwMmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NzQyZDkwZjQtMWUwZTQwYTctYjI2M2UyZmMtYmU2MjcwMmE= temp_dir_name# 50070b23-447e-0463-25ff-c3a6f315c2e9 trace_id# 2026-01-10T12:08:26.893526Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NzQyZDkwZjQtMWUwZTQwYTctYjI2M2UyZmMtYmU2MjcwMmE=, ActorId: [1:7593703641378747024:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:26.894188Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703641378747005:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:26.894236Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703641378747005:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:26.894267Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703641378747005:2322], Successfully finished 2026-01-10T12:08:26.907692Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:26.907738Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:26.907805Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:26.908047Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:26.909992Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:26.914291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:26.918830Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: 
[WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:08:26.922514Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:26.929920Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:08:26.984169Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:27.017558Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703641378747077:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:27.017736Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703641378747026:2506], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:27.020788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-10T12:08:27.020814Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-10T12:08:27.020893Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714380:2327], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:08:27.028283Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714380:2327], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:08:27.028378Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-10T12:08:27.028399Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:08:27.028680Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7593703645673714389:2328], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:08:27.030074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7593703645673714389:2328], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-10T12:08:27.039104Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:27.039129Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:08:27.039172Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:27.039194Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714401:2330], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:08:27.039402Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: 
ydb://session/3?node_id=1&id=NzQyZDkwZjQtMWUwZTQwYTctYjI2M2UyZmMtYmU2MjcwMmE=, ActorId: [1:7593703641378747024:2326], ActorState: ReadyState, LegacyTraceId: 01kekwxkmy75e25ccpmbpv7qpw, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-10T12:08:27.043248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714401:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:27.043345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:08:27.043444Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:08:27.043478Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714411:2331], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:08:27.043897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703645673714411:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: ... 2M3NzdmN2M=, TxId: 2026-01-10T12:10:20.466141Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7593704130488535215:2894], ActorId: [10:7593704130488535216:2895], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery with SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, TxId: , text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2026-01-10T12:10:20.468786Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ReadyState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Received request proxy_request_id# 29 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpc_actor# [10:7593704130488535250:2543] database# /Root database_id# /Root pool_id# trace_id# 2026-01-10T12:10:20.470376Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Acquire mvcc snapshot trace_id# 2026-01-10T12:10:20.477287Z node 10 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Read snapshot result status# UNAVAILABLE step# 1768047020509 tx_id# 18446744073709551615 trace_id# 2026-01-10T12:10:20.477380Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 136312714346712 trace_id# 2026-01-10T12:10:20.477479Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:10:20.477674Z node 10 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Created new KQP executer executer_id# [10:7593704130488535259:2536] is_rollback# false trace_id# 2026-01-10T12:10:20.484063Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, TEvTxResponse current_tx# 1 transactions_size# 2 status# SUCCESS trace_id# 2026-01-10T12:10:20.484205Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, ExecutePhyTx literal# true commit# true deferred_effects_size# 0 tx# 136312717015896 trace_id# 2026-01-10T12:10:20.485317Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, TEvTxResponse current_tx# 2 transactions_size# 2 status# SUCCESS trace_id# 2026-01-10T12:10:20.485493Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, TxInfo status# Committed kind# ReadOnly total_duration# 15.263 server_duration# 15.104 queries_count# 2 trace_id# 2026-01-10T12:10:20.485656Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:10:20.485745Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:20.485780Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, EndCleanup is_final# false trace_id# 2026-01-10T12:10:20.485851Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ExecuteState, LegacyTraceId: 01kekx12dm9ffg53pjd31qtys7, Sent query response back to proxy proxy_request_id# 29 proxy_id# [10:7593704023114351355:2265] trace_id# 2026-01-10T12:10:20.487149Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: 
[10:7593704130488535215:2894], ActorId: [10:7593704130488535216:2895], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, DataQuery #2 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, TxId: 2026-01-10T12:10:20.487286Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7593704130488535215:2894], ActorId: [10:7593704130488535216:2895], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, TxId: 2026-01-10T12:10:20.487323Z node 10 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7593704130488535215:2894], ActorId: [10:7593704130488535216:2895], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Delete session: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M= 2026-01-10T12:10:20.487436Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TRefreshPoolStateQuery] OwnerId: [10:7593704130488535214:2893], ActorId: [10:7593704130488535215:2894], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Got response [10:7593704130488535216:2895] SUCCESS 2026-01-10T12:10:20.488396Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:20.488451Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:20.488492Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:20.488528Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:20.488618Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=YzcwNTI1NDgtNmE4ZmEwMjAtMjk2N2I5YWYtY2M3NzdmN2M=, ActorId: [10:7593704130488535218:2536], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:10:20.508692Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=MTEzNTdhMTYtZjI5MjBhZjgtODk5NWNkYmMtYjgwMTVkZWQ=, ActorId: [10:7593704048884155768:2329], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:20.508761Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=MTEzNTdhMTYtZjI5MjBhZjgtODk5NWNkYmMtYjgwMTVkZWQ=, ActorId: [10:7593704048884155768:2329], ActorState: ReadyState, Cleanup start is_final# 
true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:20.508800Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=MTEzNTdhMTYtZjI5MjBhZjgtODk5NWNkYmMtYjgwMTVkZWQ=, ActorId: [10:7593704048884155768:2329], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:20.508846Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=MTEzNTdhMTYtZjI5MjBhZjgtODk5NWNkYmMtYjgwMTVkZWQ=, ActorId: [10:7593704048884155768:2329], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:20.508942Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=MTEzNTdhMTYtZjI5MjBhZjgtODk5NWNkYmMtYjgwMTVkZWQ=, ActorId: [10:7593704048884155768:2329], ActorState: unknown state, Session actor destroyed trace_id# |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> TConsoleTests::TestCreateTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> IncrementalBackup::IncrementalBackupWithIndexes [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TableCreation::TableCreationWithAcl >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |86.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> IncrementalBackup::BasicIndexIncrementalRestore [GOOD] >> IncrementalBackup::CdcVersionSync >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestValidation >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits 
|86.6%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> ScriptExecutionsTest::RestartQueryWithGetOperation >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2026-01-10T12:09:14.368874Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1768046954368835 2026-01-10T12:09:14.880829Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703848222200351:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:14.882680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:14.959609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:14.987472Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.117257Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.497303Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:15.497451Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:15.538027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:15.624851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:15.625025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:15.626751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:15.626830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:15.666032Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:15.666217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:09:15.688281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:15.885084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:15.899069Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:15.899274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:15.916671Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:16.035063Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.406967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0022a2/r3tmp/yandexvJDZoC.tmp 2026-01-10T12:09:16.407010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0022a2/r3tmp/yandexvJDZoC.tmp 2026-01-10T12:09:16.407170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0022a2/r3tmp/yandexvJDZoC.tmp 2026-01-10T12:09:16.407265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:16.538902Z INFO: TTestServer started on Port 8312 GrpcPort 20866 PQClient connected to localhost:20866 2026-01-10T12:09:17.189777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:19.873849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703848222200351:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:19.873957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:21.376658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877211380881:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.376661Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877211380892:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.376769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.377325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877211380895:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.377402Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.384956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:21.442725Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703877211380896:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:21.787755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:21.832281Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703877211380927:2144] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:21.864596Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703878286972715:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:21.867935Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTY4MDA5MWEtZGNhYzEyMzEtNzA1M2QzZTItMTY2YTg4YTc=, ActorId: [1:7593703878286972673:2333], ActorState: ExecuteState, LegacyTraceId: 01kekwz8qr4v977av290xrmt08, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:21.878451Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:09:21.920932Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703877211380942:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:21.923556Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NjAzNDk5NTQtODI5NTdkMGQtNzg0OTQ4YjMtYjIyODY3NWM=, ActorId: [2:7593703877211380879:2303], ActorState: ExecuteState, LegacyTraceId: 01kekwz8pwbvq5eq1cxe47r6dj, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:21.923933Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At ... _v1 2026-01-10T12:10:22.126477Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7593704129125720082:2487] destroyed 2026-01-10T12:10:22.126513Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:10:22.126545Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.126562Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.126576Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.126593Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.126605Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.126694Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_2_5486143439083686876_v1 2026-01-10T12:10:22.132261Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.132301Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.132315Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.132339Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.132352Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.154645Z :INFO: [/Root] [/Root] [c07a975e-e28ba759-2d96e32-7c817296] Closing read session. 
Close timeout: 0.000000s 2026-01-10T12:10:22.154724Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:10:22.154788Z :INFO: [/Root] [/Root] [c07a975e-e28ba759-2d96e32-7c817296] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1918 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.154830Z :INFO: [/Root] [/Root] [8c3ee07b-a84ce667-47403e5e-d41f26c] Closing read session. Close timeout: 0.000000s 2026-01-10T12:10:22.154874Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2026-01-10T12:10:22.154902Z :INFO: [/Root] [/Root] [8c3ee07b-a84ce667-47403e5e-d41f26c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1880 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.154923Z :INFO: [/Root] [/Root] [ace208e5-4048bf4b-cb47362c-e7a19adb] Closing read session. Close timeout: 0.000000s 2026-01-10T12:10:22.154951Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:10:22.154972Z :INFO: [/Root] [/Root] [ace208e5-4048bf4b-cb47362c-e7a19adb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1862 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.155001Z :INFO: [/Root] [/Root] [ace208e5-4048bf4b-cb47362c-e7a19adb] Closing read session. Close timeout: 0.000000s 2026-01-10T12:10:22.155038Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:10:22.155080Z :INFO: [/Root] [/Root] [ace208e5-4048bf4b-cb47362c-e7a19adb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1862 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.155177Z :NOTICE: [/Root] [/Root] [ace208e5-4048bf4b-cb47362c-e7a19adb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:10:22.156224Z :INFO: [/Root] [/Root] [8c3ee07b-a84ce667-47403e5e-d41f26c] Closing read session. Close timeout: 0.000000s 2026-01-10T12:10:22.156267Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2026-01-10T12:10:22.156297Z :INFO: [/Root] [/Root] [8c3ee07b-a84ce667-47403e5e-d41f26c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1882 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.156352Z :NOTICE: [/Root] [/Root] [8c3ee07b-a84ce667-47403e5e-d41f26c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:10:22.156816Z :INFO: [/Root] [/Root] [c07a975e-e28ba759-2d96e32-7c817296] Closing read session. Close timeout: 0.000000s 2026-01-10T12:10:22.156851Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:10:22.156879Z :INFO: [/Root] [/Root] [c07a975e-e28ba759-2d96e32-7c817296] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1920 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:10:22.156923Z :NOTICE: [/Root] [/Root] [c07a975e-e28ba759-2d96e32-7c817296] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:10:22.232588Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.232627Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.232639Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.232661Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.232674Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.336416Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.336449Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.336462Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.336482Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.336493Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.440266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.440308Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.440322Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.440341Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.440353Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.548240Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.548291Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.548306Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:22.548331Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.548345Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:22.649870Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:22.649901Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.649914Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:10:22.649935Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:22.649957Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:10:23.111539Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [3:7593704142010622163:2503] TxId: 281474976710678. Ctx: { TraceId: 01kekx147fdzd3qcrfzmx5tmwk, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ODg5YWUxNjgtNzg0ZWY1MTMtNzc0NTQxNjctNjg4ZTU0Mzc=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-10T12:10:23.111756Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593704142010622167:2503], TxId: 281474976710678, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx147fdzd3qcrfzmx5tmwk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ODg5YWUxNjgtNzg0ZWY1MTMtNzc0NTQxNjctNjg4ZTU0Mzc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593704142010622163:2503], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2026-01-10T12:10:24.113521Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ODg5YWUxNjgtNzg0ZWY1MTMtNzc0NTQxNjctNjg4ZTU0Mzc=, ActorId: [3:7593704137715654829:2503], ActorState: ExecuteState, LegacyTraceId: 01kekx147fdzd3qcrfzmx5tmwk, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } trace_id# 2026-01-10T12:10:24.116292Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } TxMeta { id: "01kekx14yz0x9fq7beeksd0hav" } } YdbStatus: UNAVAILABLE ConsumedRu: 487 } |86.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant |86.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> TCmsTest::DisableMaintenance [GOOD] >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> TCmsTest::BridgeModeGroups [GOOD] >> TCmsTest::BridgeModeNodeLimit >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> QueryStats::Ranges [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-10T12:10:29.140680Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:10:29.142083Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-10T12:10:29.142439Z node 1 :STATISTICS DEBUG: 
service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:10:29.142608Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-10T12:10:29.142729Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-10T12:10:29.142766Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:10:29.142968Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:10:29.143101Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:10:29.143271Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-10T12:10:29.143333Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:10:29.143496Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-10T12:10:29.143549Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-10T12:10:29.143590Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:10:29.143686Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-10T12:10:29.143718Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:10:29.143805Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-10T12:10:29.143846Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:10:29.143943Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-10T12:10:29.143993Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-10T12:10:29.144020Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:10:29.144116Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-10T12:10:29.144137Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:10:29.144211Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-10T12:10:29.156349Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:10:29.156445Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:10:29.156491Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:10:29.156512Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:10:29.168341Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-10T12:10:29.168420Z node 1 
:STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-10T12:10:29.168454Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:10:29.168546Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:10:29.168576Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-10T12:10:29.168618Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:10:29.168641Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:10:29.168768Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:10:29.168818Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::DisableMaintenance [GOOD] Test command err: 2026-01-10T12:10:03.102776Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false } } 2026-01-10T12:10:03.103229Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-10T12:10:03.134534Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:10:03.135401Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-10T12:10:03.137533Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" 
State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" 
State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-10T12:10:03.138327Z node 2 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-10T12:10:03.138564Z node 2 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.086000s 2026-01-10T12:10:03.138611Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:10:03.138710Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-10T12:10:03.138754Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-10T12:10:03.138803Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-10T12:10:03.138826Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-10T12:10 ... 
inel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.257402Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.257560Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.257800Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.257889Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.257976Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.258078Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.258163Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-10T12:10:14.258261Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:10:14.260024Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2026-01-10T12:10:14.268539Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-10T12:10:14.269361Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-10T12:10:14.270026Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-10T12:10:14.270207Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 2026-01-10T12:10:14.305423Z node 2 :CMS DEBUG: 
cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-10T12:10:14.340667Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:10:14.340854Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-10T12:10:14.341001Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:04:00Z 2026-01-10T12:10:14.342498Z node 2 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:14.342665Z node 2 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } 2026-01-10T12:10:14.342752Z node 2 :CMS DEBUG: cms.cpp:416: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2026-01-10T12:10:14.343055Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:10:14.343339Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:14.361234Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-10T12:10:14.361601Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2026-01-10T12:10:14.362441Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-10T12:10:14.381610Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2026-01-10T12:10:14.381978Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false 2026-01-10T12:10:20.088320Z node 2 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:10:20.088402Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:10:20.088731Z node 2 
:CMS DEBUG: cms.cpp:1178: Running CleanupWalleTasks 2026-01-10T12:10:20.089033Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-10T12:10:20.089115Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-10T12:10:20.089158Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-10T12:10:20.089196Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-10T12:10:20.089223Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-10T12:10:20.089258Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-10T12:10:20.089286Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-10T12:10:20.089310Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:10:20.089577Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090392Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090526Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090597Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090683Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090770Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090835Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { 
PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090896Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-10T12:10:20.090945Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:10:20.091284Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-10T12:10:20.091389Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-10T12:10:20.091666Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-10T12:10:20.091981Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-10T12:10:20.092035Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TCmsTest::WalleTasksDifferentPriorities [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] Test command err: 2026-01-10T12:10:19.084531Z node 27 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008183s |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> IncrementalBackup::MultipleIndexesIncrementalRestore [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TSchemeShardViewTest::DropView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2026-01-10T12:10:25.134855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:25.134926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:25.135595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:10:25.156206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:10:25.156649Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:10:25.156930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:10:25.207765Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:10:25.223437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:10:25.226092Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:10:25.227881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:10:25.227953Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:10:25.228001Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:10:25.228383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:10:25.228605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:10:25.228673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:10:25.322989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:10:25.387725Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:10:25.387994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:10:25.391763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:10:25.391878Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:10:25.391973Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:10:25.392015Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:10:25.392329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:25.392420Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:25.392824Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:10:25.392944Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:10:25.393433Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:10:25.393499Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:10:25.393538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:10:25.393577Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:10:25.393611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:10:25.393661Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute 
at 9437184 TxInFly 0 2026-01-10T12:10:25.393715Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:10:25.393840Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:25.393880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:25.393941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:10:25.403693Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:10:25.403780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:10:25.403917Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:10:25.406025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:10:25.406108Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:10:25.406222Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:10:25.406292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:10:25.406335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:10:25.406372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:10:25.406407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:10:25.406711Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:10:25.408098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:10:25.408161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:10:25.408212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:10:25.408319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:10:25.408363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:10:25.408403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 
2026-01-10T12:10:25.408435Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:10:25.408460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:10:25.429986Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:10:25.430057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:10:25.430095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:10:25.430155Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:10:25.430241Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:10:25.430818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:25.430875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:25.430936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:10:25.431098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:10:25.431131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:10:25.431280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:10:25.431325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:10:25.431359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:10:25.431408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:10:25.435619Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:10:25.435704Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:10:25.436109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:25.436163Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:25.436223Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:10:25.436265Z node 1 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:10:25.436303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:10:25.436353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:10:25.436393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 1:348:2315]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:10:32.599352Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.599388Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:10:32.599502Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.599537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.599590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.599646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-10T12:10:32.599679Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.599842Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.599883Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.599909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.599998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.600058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:10:32.600117Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.600240Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.600268Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.600286Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.600307Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.600330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.600363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at 
tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.600400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-10T12:10:32.600420Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.600520Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.600548Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:10:32.600584Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.600619Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.600663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.600699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:10:32.600738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.600826Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.600846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.600880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.600913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:10:32.600938Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.601019Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.601054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.601090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.601140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:10:32.601164Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.601250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:10:32.601274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: 
Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:10:32.601302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:10:32.601334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:10:32.601353Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:10:32.601551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-10T12:10:32.601584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.601627Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-10T12:10:32.601720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:10:32.601748Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.601788Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-10T12:10:32.601911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:10:32.601957Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.601982Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-10T12:10:32.602078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-10T12:10:32.602103Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.602140Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-10T12:10:32.602230Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:10:32.602254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.602281Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 
9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-10T12:10:32.602368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:10:32.602392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.602414Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-10T12:10:32.602483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:10:32.602514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.602541Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:10:32.602623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:10:32.602655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:10:32.602683Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TestMalformedRequest::CompressedDeflateContentLengthCorrect >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TCmsTest::BridgeModeNodeLimit [GOOD] >> TestMalformedRequest::ContentLengthCorrect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] Test command err: 2026-01-10T12:10:11.430934Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:10:11.431069Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-10T12:10:11.431262Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-10T12:10:11.434362Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { 
Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-10T12:10:11.435277Z node 9 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-10T12:10:11.435685Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-10T12:10:11.435792Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-10T12:10:11.435861Z node 9 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 9 has not yet been completed) 2026-01-10T12:10:11.436067Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:10:11.436365Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: 
MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:11.436427Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 9, marker# MARKER_DISK_FAULTY 2026-01-10T12:10:11.436777Z node 9 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.084000s 2026-01-10T12:10:11.436858Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:10:11.436995Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:10:11.437054Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-10T12:10:11.437117Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-10T12:10:11.437186Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-10T12:10:11.437225Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:10:11.437275Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:10:11.437316Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, ... 147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-10T12:10:17.399227Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:9, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-10T12:10:17.399288Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2026-01-10T12:10:17.399576Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-10T12:10:17.399828Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-10T12:10:17.399990Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2026-01-10T12:10:17.400045Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:9 2026-01-10T12:10:17.400097Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2026-01-10T12:10:17.421777Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-10T12:10:17.421879Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-10T12:10:17.438400Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:10:17.438521Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-10T12:10:17.438611Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-10T12:10:17.439467Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY 
EvictVDisks: true 2026-01-10T12:10:17.439594Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } 2026-01-10T12:10:17.439675Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2026-01-10T12:10:17.439765Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 0; State: Ok 2026-01-10T12:10:17.439797Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 1; State: Ok 2026-01-10T12:10:17.439818Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 2; State: Ok 2026-01-10T12:10:17.439848Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-10T12:10:17.440014Z node 9 :CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user, priority# 0 2026-01-10T12:10:17.444235Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-10T12:10:17.444452Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:10:17.444748Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.210512Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-10T12:10:17.444886Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:17.461446Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-10T12:10:17.461863Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } Deadline: 780210512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2026-01-10T12:10:17.461943Z node 9 :CMS DEBUG: cms.cpp:1094: Schedule cleanup at 1970-01-01T00:33:00.210512Z 2026-01-10T12:10:17.482604Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-10T12:10:17.483048Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:10:17.483139Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-10T12:10:17.483215Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-10T12:10:17.484222Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:17.484348Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC 
Message: "VDisks eviction from host 10 has not yet been completed" } 2026-01-10T12:10:17.484432Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2026-01-10T12:10:17.484496Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-10T12:10:17.484663Z node 9 :CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user, priority# 0 2026-01-10T12:10:17.484739Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (10) (permission user-p-2 until 1970-01-01T00:13:00Z) 2026-01-10T12:10:17.484840Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:10:17.485015Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.312024Z, action# Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 2026-01-10T12:10:17.485127Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:10:17.505491Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-10T12:10:17.505855Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 } Deadline: 780312024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 10 InterconnectPort: 12002 } } } } 2026-01-10T12:10:17.506617Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-1 2026-01-10T12:10:17.506687Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-10T12:10:17.506774Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-10T12:10:17.506918Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 9 2026-01-10T12:10:17.507045Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2026-01-10T12:10:17.507099Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2026-01-10T12:10:17.519781Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-10T12:10:17.520055Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-10T12:10:17.520744Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-2 2026-01-10T12:10:17.520816Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-10T12:10:17.520898Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-10T12:10:17.521009Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 10 
2026-01-10T12:10:17.521125Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2026-01-10T12:10:17.521173Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2026-01-10T12:10:17.533883Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-10T12:10:17.534106Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-10T12:10:20.189671Z node 22 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005540s 2026-01-10T12:10:31.897951Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 49 2026-01-10T12:10:31.899305Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 59 2026-01-10T12:10:31.899690Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 60 2026-01-10T12:10:31.899826Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 61 2026-01-10T12:10:31.899870Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 62 2026-01-10T12:10:31.899941Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 63 2026-01-10T12:10:31.899993Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 64 2026-01-10T12:10:31.900029Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 50 2026-01-10T12:10:31.900070Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 51 2026-01-10T12:10:31.900214Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 52 2026-01-10T12:10:31.900272Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 55 2026-01-10T12:10:31.900321Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 56 2026-01-10T12:10:31.900357Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 57 2026-01-10T12:10:31.900390Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 58 2026-01-10T12:10:31.900430Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 53 2026-01-10T12:10:31.900465Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:10:20.845929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:21.161320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:10:21.179918Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:10:21.184358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:21.184484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:10:21.693271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:21.693430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:21.700739Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047016225043 != 1768047016225046 2026-01-10T12:10:21.717877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:21.767097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:21.897088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:10:22.353961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:22.396534Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:22.526175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:22.583883Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:10:22.585119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:10:22.585463Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:10:22.585743Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:10:22.649320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:10:22.650231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:10:22.650401Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:10:22.654450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:10:22.654562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:10:22.654625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:10:22.655090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:10:22.655262Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:10:22.655367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:10:22.667747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:10:22.723157Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:10:22.723430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:10:22.723563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:10:22.723614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:10:22.723661Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:10:22.723703Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:10:22.723997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:22.724069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:22.724493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:10:22.724609Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:10:22.726965Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:10:22.727040Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:10:22.727086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:10:22.727124Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:10:22.727169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:10:22.727205Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:10:22.727288Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:10:22.727408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:22.727454Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:10:22.727500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:10:22.727598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:10:22.727654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:10:22.727795Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:10:22.728094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:10:22.728163Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:10:22.728294Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:10:22.728377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:10:22.728440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:10:22.728482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:10:22.728523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:10:22.728858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:10:22.728904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:10:22.728954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:10:22.728986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:10:22.729050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:10:22.729080Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:10:22.729112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:10:22.729154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:10:22.729187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:10:22.731211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:10:22.731269Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:10:22.743912Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 3000} 2026-01-10T12:10:33.456041Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T12:10:33.456069Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-01-10T12:10:33.456120Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:10:33.456217Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1364:3109], Recipient [2:1220:3006]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:33.456247Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:33.456280Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1362:3107], serverId# [2:1364:3109], sessionId# [0:0:0] 2026-01-10T12:10:33.456879Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1220:3006]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-10T12:10:33.456918Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-10T12:10:33.456949Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 3000 2026-01-10T12:10:33.456991Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-10T12:10:33.457037Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:10:33.468412Z node 2 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037893 ack snapshot OpId 281474976715665 2026-01-10T12:10:33.468582Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037893 2026-01-10T12:10:33.468709Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:10:33.468791Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 2026-01-10T12:10:33.468844Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037893, actorId: 
[2:1372:3117] 2026-01-10T12:10:33.468871Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037893 2026-01-10T12:10:33.468906Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037893 2026-01-10T12:10:33.468934Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-10T12:10:33.469103Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037888 ack split to schemeshard 281474976715664 2026-01-10T12:10:33.469299Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1224:3008], Recipient [2:1224:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:33.469336Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:10:33.470335Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553157, Sender [2:1224:3008], Recipient [2:970:2824]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2026-01-10T12:10:33.470412Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2026-01-10T12:10:33.471112Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [2:1363:3108], Recipient [2:970:2824]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1363:3108] ServerId: [2:1365:3110] } 2026-01-10T12:10:33.471156Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T12:10:33.471256Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-10T12:10:33.471290Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:10:33.471320Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2026-01-10T12:10:33.471346Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2026-01-10T12:10:33.471371Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2026-01-10T12:10:33.471396Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2026-01-10T12:10:33.471432Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-10T12:10:33.471619Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1224:3008]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 3000} 2026-01-10T12:10:33.471653Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-10T12:10:33.471681Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 3000 2026-01-10T12:10:33.471712Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-10T12:10:33.471801Z node 2 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1365:3110], Recipient [2:1224:3008]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:33.471829Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:33.471861Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1363:3108], serverId# [2:1365:3110], sessionId# [0:0:0] 2026-01-10T12:10:33.474097Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1224:3008]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-10T12:10:33.474142Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-10T12:10:33.474173Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 3000 2026-01-10T12:10:33.474217Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037893: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-10T12:10:33.474895Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:10:33.475849Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:399:2398], Recipient [2:888:2766] 2026-01-10T12:10:33.475956Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2026-01-10T12:10:33.478330Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2026-01-10T12:10:33.478403Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:10:33.479010Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:10:33.490491Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715665 2026-01-10T12:10:33.493994Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:399:2398], Recipient [2:972:2825] 2026-01-10T12:10:33.494079Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-01-10T12:10:33.496505Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2026-01-10T12:10:33.496582Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:10:33.497086Z 
node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:962:2819], Recipient [2:970:2824]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:10:33.984789Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1169:2864], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1169 RawX2: 8589937456 } TxBody: " \0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\221\004\000\000\000\000\000\000\0210\013\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? L\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2026-01-10T12:10:33.984875Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:10:33.985010Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3109: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-10T12:10:33.985518Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2026-01-10T12:10:33.986136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TestMalformedRequest::CompressedDeflateContentLengthNone >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TSchemeShardViewTest::DropView [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> 
TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:35.178891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:35.179004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:35.179054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:35.179092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:35.179142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:35.179197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:35.179283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:35.179374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:35.180395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:35.180731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:35.372062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:35.372177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:35.401846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:35.401976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-01-10T12:10:35.402231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:35.430668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:35.430988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:35.431753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:35.432013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:35.442626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:35.442862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:35.444310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:35.444382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:35.444447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:35.444490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:35.444599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:35.444909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:35.696810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.698954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.699075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.699180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:35.699274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:10:37.037592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-10T12:10:37.037738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-10T12:10:37.038097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:37.038214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:37.038277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-10T12:10:37.038415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:10:37.038599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:10:37.038663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:10:37.040708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:37.040764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:37.040936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:10:37.041111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:37.041157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:10:37.041206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:10:37.041682Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:10:37.041734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:10:37.041853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:10:37.041890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:37.041941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:10:37.041972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:37.042004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:10:37.042045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:37.042097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:10:37.042134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:10:37.042220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:10:37.042274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:10:37.042312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:10:37.042347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:10:37.042910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:10:37.043033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:10:37.043079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:10:37.043116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:10:37.043162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:10:37.043724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:10:37.043803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:10:37.043844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:10:37.043881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:10:37.043915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:10:37.044024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:10:37.044638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:10:37.044708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:10:37.044801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:10:37.048450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:10:37.049605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:10:37.049707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:10:37.049927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:10:37.049992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:10:37.050430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:10:37.050539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:10:37.050576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:734:2724] TestWaitNotification: OK eventTxId 102 2026-01-10T12:10:37.051084Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:10:37.051316Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 243us result status StatusPathDoesNotExist 2026-01-10T12:10:37.051499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2026-01-10T12:10:19.428321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:19.428394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:19.699218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:21.572753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:21.572824Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:21.689663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:23.599526Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:23.599599Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:23.787058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:25.469460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:25.469565Z node 4 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:25.573166Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:27.048437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:27.048510Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:27.168857Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:28.730630Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:28.730709Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:28.841476Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:30.158625Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:30.158703Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:30.256406Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) E0110 12:10:32.236422665 165655 trace.cc:67] Unknown trace var: 'sdk_authz' 2026-01-10T12:10:32.237181Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CONFIGS has been changed from WARN to NOTICE 2026-01-10T12:10:32.237313Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CONFIGS has been changed from WARN to DEBUG 2026-01-10T12:10:32.237375Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CLUSTER has been changed from WARN to NOTICE 2026-01-10T12:10:32.237409Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CLUSTER has been changed from WARN to DEBUG 2026-01-10T12:10:32.237439Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_PROXY has been changed from WARN to NOTICE 2026-01-10T12:10:32.237470Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_PROXY has been changed from WARN to DEBUG 2026-01-10T12:10:32.237495Z node 8 :CMS_CONFIGS NOTICE: 
log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_WORKER has been changed from WARN to NOTICE 2026-01-10T12:10:32.237524Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_WORKER has been changed from WARN to DEBUG 2026-01-10T12:10:32.237551Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_GATEWAY has been changed from WARN to NOTICE 2026-01-10T12:10:32.237579Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_GATEWAY has been changed from WARN to DEBUG 2026-01-10T12:10:32.237609Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_YQL has been changed from WARN to NOTICE 2026-01-10T12:10:32.237639Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_YQL has been changed from WARN to DEBUG 2026-01-10T12:10:32.237668Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_EXECUTER has been changed from WARN to NOTICE 2026-01-10T12:10:32.237696Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_EXECUTER has been changed from WARN to DEBUG 2026-01-10T12:10:32.237725Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPUTE has been changed from WARN to NOTICE 2026-01-10T12:10:32.237754Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPUTE has been changed from WARN to DEBUG 2026-01-10T12:10:32.237783Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SLOW_LOG has been changed from WARN to NOTICE 2026-01-10T12:10:32.237810Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SLOW_LOG has been changed from WARN to DEBUG 2026-01-10T12:10:32.237839Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_SERVICE has been changed from WARN to NOTICE 2026-01-10T12:10:32.237871Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_SERVICE has been changed from WARN to DEBUG 2026-01-10T12:10:32.237901Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_ACTOR has been changed from WARN to NOTICE 2026-01-10T12:10:32.237934Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_ACTOR has been changed from WARN to DEBUG 2026-01-10T12:10:32.237961Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to NOTICE 2026-01-10T12:10:32.237991Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to DEBUG 
2026-01-10T12:10:32.238023Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to NOTICE 2026-01-10T12:10:32.238051Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to DEBUG 2026-01-10T12:10:32.238080Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_BLOBS_STORAGE has been changed from WARN to NOTICE 2026-01-10T12:10:32.238110Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_BLOBS_STORAGE has been changed from WARN to DEBUG 2026-01-10T12:10:32.238138Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_REQUEST has been changed from WARN to NOTICE 2026-01-10T12:10:32.238165Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_REQUEST has been changed from WARN to DEBUG 2026-01-10T12:10:32.238193Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_NODE has been changed from WARN to NOTICE 2026-01-10T12:10:32.238222Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_NODE has been changed from WARN to DEBUG 2026-01-10T12:10:32.238251Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_LOAD_TEST has been changed from WARN to NOTICE 2026-01-10T12:10:32.238279Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_LOAD_TEST has been changed from WARN to DEBUG 2026-01-10T12:10:32.238308Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SESSION has been changed from WARN to NOTICE 2026-01-10T12:10:32.238337Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SESSION has been changed from WARN to DEBUG 2026-01-10T12:10:32.238372Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to NOTICE 2026-01-10T12:10:32.238403Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to DEBUG 2026-01-10T12:10:32.238431Z node 8 :CMS_CONFIGS NOTICE: log_settings_con ... 
026-01-10T12:10:36.512721Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from WARN to ALERT 2026-01-10T12:10:36.512749Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2026-01-10T12:10:36.512779Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-10T12:10:36.512812Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-10T12:10:36.512842Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2026-01-10T12:10:36.512870Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-10T12:10:36.512901Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-10T12:10:36.512928Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2026-01-10T12:10:36.512961Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-10T12:10:36.512993Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-10T12:10:36.513021Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2026-01-10T12:10:36.513051Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-10T12:10:36.513081Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-10T12:10:36.513107Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_BRIDGE has been changed from 0 to 10 2026-01-10T12:10:36.513136Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-10T12:10:36.513165Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-10T12:10:36.513190Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_CLUSTER_BALANCING has been changed from 0 to 10 2026-01-10T12:10:36.513222Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority 
for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-10T12:10:36.513255Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-10T12:10:36.513281Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_BRIDGE_SYNC has been changed from 0 to 10 2026-01-10T12:10:36.513309Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-10T12:10:36.513337Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-10T12:10:36.513367Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PHANTOM_FLAG_STORAGE has been changed from 0 to 10 2026-01-10T12:10:36.513397Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-10T12:10:36.513426Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-10T12:10:36.513453Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2026-01-10T12:10:36.513497Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-10T12:10:36.513540Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-10T12:10:36.513571Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SASL_AUTH has been changed from 0 to 10 2026-01-10T12:10:36.513603Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-01-10T12:10:36.513631Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-01-10T12:10:36.513658Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2026-01-10T12:10:36.513690Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-10T12:10:36.513723Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-10T12:10:36.513752Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2026-01-10T12:10:36.513782Z node 11 :CMS_CONFIGS NOTICE: 
log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-10T12:10:36.513810Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-10T12:10:36.513836Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2026-01-10T12:10:36.513867Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-10T12:10:36.513895Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-10T12:10:36.513924Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2026-01-10T12:10:36.513959Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-10T12:10:36.513988Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-10T12:10:36.514017Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2026-01-10T12:10:36.514050Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-10T12:10:36.514078Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-10T12:10:36.514105Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BRIDGE has been changed from 0 to 10 2026-01-10T12:10:36.514136Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TRANSFER has been changed from WARN to ALERT 2026-01-10T12:10:36.514167Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TRANSFER has been changed from WARN to ALERT 2026-01-10T12:10:36.524229Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TRANSFER has been changed from 0 to 10 2026-01-10T12:10:36.524348Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-10T12:10:36.524396Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-10T12:10:36.524439Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TOKEN_MANAGER has been changed from 0 to 10 2026-01-10T12:10:36.524470Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-10T12:10:36.524502Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-10T12:10:36.524528Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LOCAL_DB_BACKUP has been changed from 0 to 10 2026-01-10T12:10:36.524557Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-10T12:10:36.524585Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-10T12:10:36.524613Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SCHEMA_SECRET_CACHE has been changed from 0 to 10 2026-01-10T12:10:36.524754Z node 11 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ... waiting for config update (done) |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> TableCreation::TableCreationWithAcl [GOOD] >> TableCreation::UpdateTableWithAclModification |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> S3SettingsConversion::Basic >> S3SettingsConversion::Basic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> 
IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> ColumnShardTiers::TieringUsage |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> TestMalformedRequest::CompressedDeflateContentLengthCorrect [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> TestMalformedRequest::ContentLengthCorrect [GOOD] >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> TestMalformedRequest::CompressedDeflateContentLengthNone [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> TestMalformedRequest::CompressedGzipContentLengthCorrect >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> IncrementalBackup::CdcVersionSync [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] Test command err: 2026-01-10T12:10:35.296555Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704196436542230:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:35.313175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:35.887473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:10:35.895050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:35.895149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:35.935886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:36.091821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:36.116220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704196436542102:2081] 1768047035235592 != 1768047035235595 2026-01-10T12:10:36.191399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:10:36.354826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:36.361281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-10T12:10:36.361300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:36.361346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:36.361418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:36.788566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:36.801751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:10:37.169956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:10:37.184917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:10:37.187198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:37.228791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:10:37.398022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:37.587284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:37.669221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:37.751700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:37.864802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:37.928996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.012569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.058282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.131874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.188495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:40.300193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704196436542230:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:40.300260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:41.085966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222206347548:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.086107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.086457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222206347560:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.086497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222206347561:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.086744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.090814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:41.111858Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704222206347564:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:10:41.209816Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704222206347615:3110] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:10:41.727399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:41.775671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:41.816291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... n" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.566094Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 88ms 2026-01-10T12:10:43.566511Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } 
} } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.566548Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:10:43.566642Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 91ms 2026-01-10T12:10:43.567113Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.597083Z node 1 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.597115Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution 
duration: 32ms 2026-01-10T12:10:43.597339Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.597367Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:10:43.597431Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 32ms 2026-01-10T12:10:43.598041Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:43.744947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704230796282905:2440]: Pool not found 2026-01-10T12:10:43.745651Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:10:44.046950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704230796282886:2439]: Pool not found 2026-01-10T12:10:44.047536Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:10:44.050507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704235091250322:2463], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:44.050572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593704235091250323:2464], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:10:44.050610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:44.062244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704235091250326:2465], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:44.062420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:44.400048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704235091250320:2462]: Pool not found 2026-01-10T12:10:44.400415Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:10:44.461497Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46324) incoming connection opened 2026-01-10T12:10:44.461630Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46324) -> (POST /Root, 24 bytes) 2026-01-10T12:10:44.461886Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f84f:ebd0:c97b:0:e04f:ebd0:c97b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 14882980-b024a948-566c5927-2e41f851 2026-01-10T12:10:44.463046Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [14882980-b024a948-566c5927-2e41f851] reply with status: BAD_REQUEST message: Failed to decode POST body 2026-01-10T12:10:44.463235Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46324) <- (400 AccessDeniedException, 73 bytes) 2026-01-10T12:10:44.463292Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46324) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: deflate Content-Length: 32 {"QueueName": "Example"} 2026-01-10T12:10:44.463327Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46324) Response: HTTP/1.1 400 AccessDeniedException Connection: keep-alive x-amzn-requestid: 14882980-b024a948-566c5927-2e41f851 Content-Type: application/x-amz-json-1.1 Content-Length: 73 Http output full {"__type":"AccessDeniedException","message":"Failed to decode POST body"} 2026-01-10T12:10:44.465487Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:46324) connection closed |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> IncrementalBackup::IncrementalBackupWithCoveringIndex [GOOD] >> IncrementalBackup::IncrementalBackupMultipleIndexes >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> 
TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:44.440198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:44.440324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.440374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:44.440409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:44.440462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:44.440520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:44.440596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.440665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:44.441559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:44.441843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:44.534266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:44.534334Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:44.553670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:44.553815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:44.554046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:44.562191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:44.562396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:44.562978Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:44.563202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:44.566095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:44.566287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:44.567216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:44.567278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:44.567335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:44.567386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:44.567421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:44.567616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:44.797744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:10:44.798732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.798925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:44.799219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... t TxId: 102 at schemeshard: 72057594046678944 2026-01-10T12:10:46.277793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:889:2845] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-10T12:10:46.278246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:128:2153], Recipient [1:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:10:46.278288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:10:46.278350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:10:46.278416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-10T12:10:46.278460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:10:46.278503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-10T12:10:46.278561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-10T12:10:46.278647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: 
CopyTableBarrier }, at tablet# 72057594046678944 2026-01-10T12:10:46.278704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-10T12:10:46.280621Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:10:46.280670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-10T12:10:46.280810Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:128:2153], Recipient [1:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:10:46.280858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:10:46.280937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:10:46.280985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:10:46.281104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:10:46.281133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:10:46.281172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:46.281221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:10:46.281255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:46.281289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:10:46.281365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2755] message: TxId: 102 2026-01-10T12:10:46.281455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:10:46.281502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:10:46.281551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:10:46.281719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:10:46.281771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:10:46.283702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:10:46.283812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:788:2755] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at 
schemeshard: 72057594046678944 2026-01-10T12:10:46.284013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:10:46.284066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:919:2867] 2026-01-10T12:10:46.284356Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:921:2869], Recipient [1:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:46.284417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:10:46.284449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-10T12:10:46.284862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:992:2938], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-10T12:10:46.284923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:46.285062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:46.285307Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy" took 258us result status StatusSuccess 2026-01-10T12:10:46.285840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy" PathDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableCopy" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:46.286457Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:993:2939], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-10T12:10:46.286506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:46.286616Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:46.286988Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex" took 313us result status StatusPathDoesNotExist 2026-01-10T12:10:46.287167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TableCopy/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/TableCopy\' (id: [OwnerId: 72057594046678944, LocalPathId: 41])" Path: "/MyRoot/TableCopy/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/TableCopy" LastExistedPrefixPathId: 41 LastExistedPrefixDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> 
TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:44.719453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:44.719598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.719653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:44.719696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:44.719782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:44.719822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:44.719892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.720011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:44.721153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:44.721510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:44.820981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:44.821076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:44.839071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:44.839194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:44.839376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:44.857455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:44.857738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:44.858553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:44.858831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:44.864231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:44.864457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:44.865761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:44.865835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:44.865909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:44.865965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:44.866042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:44.866337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:45.057182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.058903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.059645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... s: true ShowPrivateTable: true } 2026-01-10T12:10:46.631302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:46.631427Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:46.631812Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex" took 327us result status StatusSuccess 2026-01-10T12:10:46.632885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Index impl table name: indexImplTable 2026-01-10T12:10:46.633603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1079:3013], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:10:46.633658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:46.633812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:46.634124Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" took 326us result status StatusSuccess 2026-01-10T12:10:46.635076Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestRemoveTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-10T12:10:44.976376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:44.976495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.976552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:44.976587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:44.976640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:44.976671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:44.976737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:44.976805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:44.977710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:44.977988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:45.081483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:45.081583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:45.099392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:45.099495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:45.099686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:45.111852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:45.112148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:45.112907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:45.113142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:45.119538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:45.119748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-10T12:10:45.120830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:45.120880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:45.120930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:45.121001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:45.121049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:45.121326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:45.281568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.282981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.283045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:45.283090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 45 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:47.077117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1225:3136], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 
2026-01-10T12:10:47.077195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:47.077289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:47.077536Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2" took 229us result status StatusSuccess 2026-01-10T12:10:47.078397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2" PathDescription { Self { Name: "ValueIndex2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex2" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value2" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 
67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:47.079060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1226:3137], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-10T12:10:47.079134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:47.079269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:47.079513Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2/indexImplTable" took 202us result status StatusSuccess 2026-01-10T12:10:47.080006Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 47 SchemeshardId: 
72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 47 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> ScriptExecutionsTest::BackgroundOperationRestart >> 
TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] >> TableCreation::UpdateTableWithAclModification [GOOD] >> TableCreation::UpdateTableAcl >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> S3SettingsConversion::Port >> S3SettingsConversion::Port [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> KqpProxy::DatabasesCacheForServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:47.627774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:47.627886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:47.627931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:47.627970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:47.628044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:47.628116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:47.628217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:47.628304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:47.629255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:47.629582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:47.728417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:47.728515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:47.745484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:47.745609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:47.745809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:47.755348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:47.755612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:47.756424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:47.756693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:47.759368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:47.759583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:47.760887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:47.760972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:47.761072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:47.761131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:47.761183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:47.761475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:47.944288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.945926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:47.946515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
BE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:50.012736Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 240us result status StatusSuccess 2026-01-10T12:10:50.013568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 
CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:50.014248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1499:3323], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:10:50.014300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:10:50.014405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:10:50.014715Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 281us result status StatusSuccess 2026-01-10T12:10:50.015544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "19700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 49 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> S3SettingsConversion::FoldersStrictStyle [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> 
ResourcePoolClassifiersDdl::TestDropResourcePool [FAIL] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:51.673163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:51.799096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:51.821462Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:51.822170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:51.822302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:52.290214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:52.290391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:52.298373Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046927802612 != 1768046927802615 2026-01-10T12:08:52.319857Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:52.380826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:52.475201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:52.822320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.822407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.822463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.822581Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-10T12:08:52.822623Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:08:52.971567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:08:52.971826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.972094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:08:52.972150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:08:52.972385Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:08:52.972494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:52.972598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.973425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:08:52.973592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:08:52.973643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.973682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.973874Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.973914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.974010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.974074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:08:52.974127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:52.974174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:52.974285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.974912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.974960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.975097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.975152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.975217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.975268Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:08:52.975311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:52.975389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.975813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.975844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.975955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.975988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.976040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.976098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.976149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:08:52.976188Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.976231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:52.980458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:52.981112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.981166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:52.981308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:08:52.981625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-10T12:08:52.982046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.982112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.982153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.982312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-10T12:08:52.982357Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:08:52.982450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... er action and tx pending commits 2026-01-10T12:10:40.723441Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.723477Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.746872Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:40.746951Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.746987Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:40.747035Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.747079Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.782737Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:40.782814Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.782853Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:40.782896Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.782932Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.806805Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:40.806895Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.806934Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:40.806976Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.807011Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.829370Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process 
user action and tx events 2026-01-10T12:10:40.829470Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.829514Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:40.829553Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.829598Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.854322Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [9:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:10:40.854412Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:10:40.854510Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [9:399:2398], Recipient [9:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:10:40.854543Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:10:40.892165Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:10:40.892345Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:10:40.892392Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.892431Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:10:40.892494Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:10:40.892534Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:10:40.919533Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1772:3366], Recipient [9:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-10T12:10:40.919671Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found incremental backup table at: /Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table 2026-01-10T12:10:40.922526Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1774:3368], Recipient [9:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-10T12:10:40.922635Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} ... 
waiting for SysViewsRoster update finished 2026-01-10T12:10:48.065211Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:48.087859Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:10:48.101623Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:290:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:10:48.102240Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:48.102352Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:10:48.626747Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:48.626976Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:48.635500Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1768047041989370 != 1768047041989374 2026-01-10T12:10:48.643273Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:48.704050Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:48.797502Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:10:49.137781Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:49.154205Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:49.291148Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:49.641763Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:49.641961Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:49.642111Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:49.643767Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:49.644024Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:49.650931Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:49.805387Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:10:49.843540Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 |86.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] Test command err: 2026-01-10T12:10:36.185968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704200868799718:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:36.186002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:36.752730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:10:36.758014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:36.758128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:36.763121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:37.008523Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704200868799496:2081] 1768047036022946 != 1768047036022949 2026-01-10T12:10:37.044313Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:37.051757Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:37.123245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:10:37.296778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:37.296798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:37.296804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:37.296900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:37.657269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:37.664607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:10:37.972023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:10:37.982374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:10:37.986551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:38.012838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:10:38.109457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.499179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.606485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:10:38.611810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.716879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.816505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.895860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.938738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:38.991151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.041203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.099052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:41.082509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704200868799718:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:41.082603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:41.490923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222343637660:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.491015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222343637651:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.491159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.493811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704222343637666:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.493907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:41.495964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:41.513966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704222343637665:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:10:41.614929Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704222343637718:3121] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:10:42.126854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:42.196573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:42.268376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pr ... \t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\030\207\203\001H\207\203\010\207\203\001H\207\203\001H\207\203\014\207\203\001H\207\203\010\207\203\001H\207\203\010\207\203\010\207\203\004\207\203\010\026\032\036\"&*.26:>BJ\000\003?\006\014queues\t\211\004?:\205\004?:\203\014\020List$Truncated\203\004F\000\t\211\026?B\203\005\004\200\205\030\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\026\032\036\"&*.26:>B\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?H \000\001\205\000\000\000\000\001\'\000\000\000\000\000\000\000?8\005?b\003?J\002\003?L\016\003?N\026\003?P\034\003?R\n\003?T\030\003?V\024\003?X\004\003?Z\010\003?\\\020\003?^\036\003?`\032\377\017\013?h\t\351\000?d\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\236\003?\240\022FROM_USER\003\022\000\t\351\000?f\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. 
Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2026-01-10T12:10:52.599276Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.599321Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 218ms 2026-01-10T12:10:52.599725Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type 
{ Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.599756Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:10:52.599845Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 218ms 2026-01-10T12:10:52.600342Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.604465Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704269341227529:2440]: Pool not found 2026-01-10T12:10:52.605211Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:10:52.847247Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704269341227520:2438]: Pool not found 2026-01-10T12:10:52.847378Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2026-01-10T12:10:52.850057Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704269341227642:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:52.850131Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704269341227643:2459], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:10:52.850172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:52.850481Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704269341227646:2460], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:52.850538Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:53.132951Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704269341227640:2457]: Pool not found 2026-01-10T12:10:53.133538Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-10T12:10:53.333900Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:44728) incoming connection opened 2026-01-10T12:10:53.333985Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:44728) -> (POST /Root) 2026-01-10T12:10:53.334118Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [9882:7f4e:827b:0:8082:7f4e:827b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 4fb21520-b0d402cd-8d448b87-ed3d4b7a 2026-01-10T12:10:53.334586Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [4fb21520-b0d402cd-8d448b87-ed3d4b7a] reply with status: BAD_REQUEST message: Empty body 2026-01-10T12:10:53.334790Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:44728) <- (400 InvalidArgumentException, 60 bytes) 2026-01-10T12:10:53.334844Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:44728) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip 2026-01-10T12:10:53.334897Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:44728) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 4fb21520-b0d402cd-8d448b87-ed3d4b7a Content-Type: application/x-amz-json-1.1 Content-Length: 60 2026-01-10T12:10:53.336270Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:44728) connection closed |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] Test command err: 2026-01-10T12:10:37.012228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704204230657693:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:37.018443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:37.697281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:37.697382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:37.754936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:37.820889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:10:37.961182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7593704204230657669:2081] 1768047037006156 != 1768047037006159 2026-01-10T12:10:37.970306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:38.092826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:38.098617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:10:38.293526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:38.293541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:38.293547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:38.293638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:38.714255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:38.722104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:10:39.096307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:10:39.116361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:10:39.121740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:39.152997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:10:39.241139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.442131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.526588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.647661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-10T12:10:39.656304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.756248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.837682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.894550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:39.967570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:40.062507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:40.137294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:42.020300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704204230657693:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:42.020397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:42.216848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704225705495811:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:42.217003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:42.218390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704225705495823:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:42.218464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704225705495824:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:42.218504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:42.229845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:42.253573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704225705495827:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:10:42.335566Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704225705495878:3109] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:10:42.910889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:43.011667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:43.081387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pr ... 005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. 
Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2026-01-10T12:10:52.557836Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.566029Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.566065Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 8ms 2026-01-10T12:10:52.566455Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } 
Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:10:52.566496Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:10:52.566579Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 8ms 2026-01-10T12:10:52.567023Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } 
} 2026-01-10T12:10:52.796852Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704267612046515:2435]: Pool not found 2026-01-10T12:10:52.797469Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:10:53.073631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704267612046498:2434]: Pool not found 2026-01-10T12:10:53.073810Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:10:53.076670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704271907013934:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:53.076816Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:53.076841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704271907013936:2458], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:10:53.078569Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704271907013938:2459], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:53.078642Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:53.392910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704271907013931:2456]: Pool not found 2026-01-10T12:10:53.393102Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:10:53.524718Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:34710) incoming connection opened 2026-01-10T12:10:53.524800Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:34710) -> (POST /Root, 44 bytes) 2026-01-10T12:10:53.525027Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f84e:28f5:7e7b:0:e04e:28f5:7e7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: d6b794c-f7be3d32-22ff0b92-20f3acd2 2026-01-10T12:10:53.525771Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [d6b794c-f7be3d32-22ff0b92-20f3acd2] reply with status: BAD_REQUEST message: Can not parse request body from JSON 2026-01-10T12:10:53.525958Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:34710) <- (400 InvalidArgumentException, 86 bytes) 2026-01-10T12:10:53.526024Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:34710) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip Content-Length: 44 nhV ,M-MKMURPrH-IU2j 2026-01-10T12:10:53.526054Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:34710) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: d6b794c-f7be3d32-22ff0b92-20f3acd2 Content-Type: application/x-amz-json-1.1 Content-Length: 86 Http output full {"__type":"InvalidArgumentException","message":"Can not parse request body from JSON"} 2026-01-10T12:10:53.528423Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:34710) connection closed |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> TConsoleConfigTests::TestAllowedScopes >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TableCreation::UpdateTableAcl [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2026-01-10T12:08:42.413348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703708063384713:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:42.413415Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:42.817912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:42.964684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:42.964795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:43.016848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703708063384608:2081] 1768046922389916 != 1768046922389919 2026-01-10T12:08:43.023644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:43.030715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:43.036155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:43.380694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:43.380715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:43.380723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:43.380821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:43.448309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:43.752328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:43.769159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:08:46.617625Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:46.626713Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:46.626791Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:46.626940Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703725243254659:2322], Start check tables existence, number paths: 2 2026-01-10T12:08:46.637551Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: 
ydb://session/3?node_id=1&id=ZTQ5NjVhNC04MjYxNTgxYS05ZjQ3MTQ0MC03NGM4ZTRhYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZTQ5NjVhNC04MjYxNTgxYS05ZjQ3MTQ0MC03NGM4ZTRhYQ== temp_dir_name# 3b5f6bbd-48de-a064-9291-3aa6980c78ce trace_id# 2026-01-10T12:08:46.637997Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZTQ5NjVhNC04MjYxNTgxYS05ZjQ3MTQ0MC03NGM4ZTRhYQ==, ActorId: [1:7593703725243254686:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:46.638399Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703725243254659:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:46.638459Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703725243254659:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:46.638493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703725243254659:2322], Successfully finished 2026-01-10T12:08:46.654656Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:46.654817Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:46.700054Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:46.704890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:46.716269Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-10T12:08:46.719318Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:46.719493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:08:46.720983Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:08:46.816214Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:46.866692Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703725243254739:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:46.866916Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703725243254688:2502], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:46.870252Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ= temp_dir_name# 21fd5f32-4148-9b1b-1533-8ba6f4d2755f trace_id# 2026-01-10T12:08:46.870898Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:46.870913Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:08:46.870960Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:46.870993Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ=, ActorId: [1:7593703725243254747:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:46.871265Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ=, ActorId: [1:7593703725243254747:2327], ActorState: ReadyState, LegacyTraceId: 01kekwy70q59jxqvjchwa4jag1, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7593703725243254746:2540] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-10T12:08:46.871339Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7593703725243254747:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ= 2026-01-10T12:08:46.871393Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703725243254749:2328], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:08:46.871472Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593703725243254750:2329], Database: /Root, Start database fetching 2026-01-10T12:08:46.877696Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7593703725243254750:2329], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-10T12:08:46.877885Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703725243254749:2328], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:08:46.877941Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-10T12:08:46.877984Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-10T12:08:46.878000Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:08:46.878299Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7593703725243254760:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjhmZDk4OGEtNWFiMjIyNDktNzNjMzFkY2YtMTVhNDVlOGQ=, Start pool fetching 2026-01-10T12:08:46.878403Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7593703725243254761:2331], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:08:46.878467Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_a ... 6-01-10T12:10:56.658823Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:56.662404Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593704283395440875:2510], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:10:56.665224Z node 7 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593704283395440875:2510], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:10:56.674960Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593704283395440875:2510], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:10:56.750180Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593704283395440875:2510], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:10:56.754897Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7593704283395440926:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:10:56.755117Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593704283395440875:2510], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:10:56.763931Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ= temp_dir_name# 3efb7e88-41b9-18d6-ce20-ee8d64c3e241 trace_id# 2026-01-10T12:10:56.764218Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:10:56.768913Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:10:56.768945Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:10:56.768996Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:10:56.769143Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: ReadyState, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [7:7593704283395440932:2547] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-10T12:10:56.769200Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [7:7593704283395440933:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ= 2026-01-10T12:10:56.769269Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593704283395440935:2329], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:10:56.769371Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7593704283395440936:2330], Database: /Root, Start database fetching 2026-01-10T12:10:56.774138Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[7:7593704283395440936:2330], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-10T12:10:56.774341Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593704283395440935:2329], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:10:56.774417Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-10T12:10:56.774496Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-10T12:10:56.774517Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:10:56.774792Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7593704283395440947:2332], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:10:56.774867Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [7:7593704283395440946:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, Start pool fetching 2026-01-10T12:10:56.774914Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593704283395440948:2333], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:10:56.777737Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593704283395440948:2333], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:10:56.777825Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7593704283395440947:2332], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2026-01-10T12:10:56.777947Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [7:7593704283395440946:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, Pool info successfully resolved 2026-01-10T12:10:56.778060Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ= 2026-01-10T12:10:56.778151Z node 7 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:297: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ= 2026-01-10T12:10:56.778414Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: ExecuteState, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id status# PRECONDITION_FAILED issues# { message: 
"Resource pool sample_pool_id was disabled due to zero concurrent query limit" severity: 1 } trace_id# 2026-01-10T12:10:56.778565Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: ExecuteState, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-10T12:10:56.778825Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [7:7593704283395440933:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ= 2026-01-10T12:10:56.778891Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: CleanupState, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, EndCleanup is_final# true trace_id# 2026-01-10T12:10:56.779020Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: CleanupState, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Sent query response back to proxy proxy_request_id# 3 proxy_id# [7:7593704261920603731:2262] trace_id# 2026-01-10T12:10:56.779064Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: unknown state, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:56.779211Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=ZmNjNzBjYzEtNjM3NWVmZWUtMjM4OGFlYmYtMzcwNmUwYmQ=, ActorId: [7:7593704283395440933:2328], ActorState: unknown state, LegacyTraceId: 01kekx25w1e4ejr94p2mfpsh91, Session actor destroyed trace_id# 2026-01-10T12:10:56.824422Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=7&id=NmNiNzI5Y2QtMzcwYTMzYi01YjhiNzMyZC02OTQyMjBhNg==, ActorId: [7:7593704283395440848:2325], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:56.824511Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=7&id=NmNiNzI5Y2QtMzcwYTMzYi01YjhiNzMyZC02OTQyMjBhNg==, ActorId: [7:7593704283395440848:2325], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:56.824565Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=NmNiNzI5Y2QtMzcwYTMzYi01YjhiNzMyZC02OTQyMjBhNg==, ActorId: [7:7593704283395440848:2325], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:56.824616Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=NmNiNzI5Y2QtMzcwYTMzYi01YjhiNzMyZC02OTQyMjBhNg==, ActorId: [7:7593704283395440848:2325], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:56.824744Z node 7 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=NmNiNzI5Y2QtMzcwYTMzYi01YjhiNzMyZC02OTQyMjBhNg==, ActorId: [7:7593704283395440848:2325], ActorState: unknown state, Session actor destroyed trace_id# |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] Test command err: 2026-01-10T12:10:21.641892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:21.641964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:22.068430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:24.907668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:24.907742Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:25.044505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:26.653498Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:26.653583Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:26.749438Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:28.220012Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:28.224380Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:28.337554Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:29.865356Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:29.865435Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:30.037135Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:31.190167Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:31.190276Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:31.313555Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:32.806651Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:32.806741Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:32.997529Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:37.738064Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:37.738143Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:37.853961Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:42.857993Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:42.858086Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:43.057575Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:44.469411Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:44.469494Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:44.673541Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:45.429456Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 11 2026-01-10T12:10:45.429580Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 11 2026-01-10T12:10:45.429627Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: 
[72057594046578946] NodeDisconnected NodeId# 11 2026-01-10T12:10:45.430601Z node 11 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [11:462:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-10T12:10:46.537230Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:46.537313Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:46.657901Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:48.619133Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:48.619208Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:48.721660Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:50.608244Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:50.608333Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:50.770851Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:52.583780Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:52.583869Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:52.717741Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:54.629931Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:54.630029Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:54.800363Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:56.128300Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:56.128396Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:56.264780Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:58.472449Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.472535Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.572352Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::UpdateTableAcl [GOOD] Test command err: 2026-01-10T12:10:27.724573Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704159495598633:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:27.727154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:27.955845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:27.956014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:28.094913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:28.220294Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704159495598415:2081] 1768047027685427 != 1768047027685430 2026-01-10T12:10:28.220637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:28.676853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:28.676880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:28.676887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:28.676984Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:28.715792Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:28.798298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:10:28.816878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:10:31.970493Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:10:31.977525Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:10:31.977592Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:10:31.977612Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:10:31.980690Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Describe result: PathErrorUnknown 2026-01-10T12:10:31.980719Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Creating table 2026-01-10T12:10:32.000193Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:10:32.000413Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Describe result: PathErrorUnknown 2026-01-10T12:10:32.000423Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Creating table 2026-01-10T12:10:32.000460Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:10:32.000578Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Describe result: PathErrorUnknown 2026-01-10T12:10:32.000588Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Creating table 2026-01-10T12:10:32.000602Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:10:32.018092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:32.022973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:32.025055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:32.032576Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:10:32.032646Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Subscribe on create table tx: 281474976710659 2026-01-10T12:10:32.033456Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:10:32.033500Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Subscribe on create table tx: 281474976710660 2026-01-10T12:10:32.033768Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:10:32.033795Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Subscribe on create table tx: 281474976710658 2026-01-10T12:10:32.038725Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Subscribe on tx: 281474976710659 registered 2026-01-10T12:10:32.038757Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:10:32.038855Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. 
Subscribe on tx: 281474976710660 registered 2026-01-10T12:10:32.157585Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:10:32.196282Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:10:32.207675Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593704176675468448:2497] Owner: [1:7593704176675468445:2494]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:10:32.248582Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-10T12:10:32.248665Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Column diff is empty, finishing 2026-01-10T12:10:32.249716Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:10:32.250748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:32.252751Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:10:32.252783Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7593704176675468446:2495] Owner: [1:7593704176675468445:2494]. Successful alter request: ExecComplete 2026-01-10T12:10:32.288394Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:10:32.288439Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. Column diff is empty, finishing 2026-01-10T12:10:32.288524Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704176675468447:2496] Owner: [1:7593704176675468445:2494]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:10:32.289460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operati ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-10T12:10:57.216286Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzRhNjhmNmEtZDhiMTBmYjctNWYzMjkxYWYtYTQ1YjllMGM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7593704291855162404:2515] 2026-01-10T12:10:57.216339Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7593704291855162406:2949] 2026-01-10T12:10:57.229020Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7593704291855162405:2516], selfId: [3:7593704261790389837:2248], source: [3:7593704291855162404:2515] 2026-01-10T12:10:57.230108Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704291855162401:2947], ActorId: [3:7593704291855162402:2948], TraceId: ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NzRhNjhmNmEtZDhiMTBmYjctNWYzMjkxYWYtYTQ1YjllMGM=, TxId: 2026-01-10T12:10:57.230685Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704291855162401:2947], ActorId: [3:7593704291855162402:2948], TraceId: ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NzRhNjhmNmEtZDhiMTBmYjctNWYzMjkxYWYtYTQ1YjllMGM=, TxId: 2026-01-10T12:10:57.230712Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704291855162401:2947], ActorId: [3:7593704291855162402:2948], TraceId: ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-10T12:10:57.230808Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704291855162400:2946], ActorId: [3:7593704291855162401:2947], TraceId: ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3, RequestDatabase: /dc-1, Got response [3:7593704291855162402:2948] SUCCESS 2026-01-10T12:10:57.230854Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704291855162399:2945] ActorId: [3:7593704291855162400:2946] Database: /dc-1 ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3. 
Extracted script execution operation [3:7593704291855162402:2948], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7593704278970259890:2674], LeaseGeneration: 0 2026-01-10T12:10:57.230878Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704291855162399:2945] ActorId: [3:7593704291855162400:2946] Database: /dc-1 ExecutionId: 1427e182-c84cc9c8-aade9023-30d6cdf3. Reply success 2026-01-10T12:10:57.235373Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzRhNjhmNmEtZDhiMTBmYjctNWYzMjkxYWYtYTQ1YjllMGM=, workerId: [3:7593704291855162404:2515], local sessions count: 0 2026-01-10T12:10:57.270311Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx26bn39je737475frv2sx", Request has 18444976026652.281331s seconds to be completed 2026-01-10T12:10:57.272970Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx26bn39je737475frv2sx", Created new session, sessionId: ydb://session/3?node_id=3&id=MTU2OWMwMjItZDJkMmU4YTYtYmM0MTdmOGEtMjcwNTJhNGQ=, workerId: [3:7593704291855162434:2529], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:10:57.273207Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx26bn39je737475frv2sx 2026-01-10T12:10:57.290714Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx26cabneh70pj70463m19, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MTU2OWMwMjItZDJkMmU4YTYtYmM0MTdmOGEtMjcwNTJhNGQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7593704291855162434:2529] 2026-01-10T12:10:57.290771Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7593704291855162437:2957] 2026-01-10T12:10:57.308944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:57.313520Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx26cabneh70pj70463m19", Forwarded response to sender actor, requestId: 42, sender: [3:7593704291855162436:2530], selfId: [3:7593704261790389837:2248], source: [3:7593704291855162434:2529] --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:10:57.320566Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Describe result: PathErrorUnknown 2026-01-10T12:10:57.320595Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Creating table 2026-01-10T12:10:57.320630Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-10T12:10:57.327710Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:57.328743Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-10T12:10:57.328784Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Subscribe on create table tx: 281474976710687 2026-01-10T12:10:57.330670Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Subscribe on tx: 281474976710687 registered 2026-01-10T12:10:57.356631Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-01-10T12:10:57.455549Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:10:57.455594Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593704291855162448:2964] Owner: [3:7593704291855162447:2963]. Column diff is empty, finishing 2026-01-10T12:10:57.474061Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx26j1bg76qwczte5jx9zy", Request has 18444976026652.077587s seconds to be completed 2026-01-10T12:10:57.476719Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx26j1bg76qwczte5jx9zy", Created new session, sessionId: ydb://session/3?node_id=3&id=ZDEwZDU4MjYtYTc4M2FlMTItNTUzNTY4YjctMzg1MjBmNzg=, workerId: [3:7593704291855162583:2542], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:10:57.476952Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx26j1bg76qwczte5jx9zy 2026-01-10T12:10:57.504398Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593704291855162589:3071] Owner: [3:7593704291855162588:3070]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:10:57.504434Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593704291855162589:3071] Owner: [3:7593704291855162588:3070]. Column diff is empty, finishing 2026-01-10T12:10:57.504527Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7593704291855162589:3071] Owner: [3:7593704291855162588:3070]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-10T12:10:57.505616Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:57.507503Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593704291855162589:3071] Owner: [3:7593704291855162588:3070]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:10:57.507512Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7593704291855162589:3071] Owner: [3:7593704291855162588:3070]. Successful alter request: ExecComplete 2026-01-10T12:10:57.509926Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZDEwZDU4MjYtYTc4M2FlMTItNTUzNTY4YjctMzg1MjBmNzg=, workerId: [3:7593704291855162583:2542], local sessions count: 1 2026-01-10T12:10:57.515753Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx26kbayf15v9k22vea46g", Request has 18444976026652.035905s seconds to be completed 2026-01-10T12:10:57.518677Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx26kbayf15v9k22vea46g", Created new session, sessionId: ydb://session/3?node_id=3&id=YTBmNzliMy1jYWQxZDQwNi05M2FmNzJhYS1kNDM5OGI2MA==, workerId: [3:7593704291855162601:2546], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:10:57.518924Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx26kbayf15v9k22vea46g 2026-01-10T12:10:57.548578Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YTBmNzliMy1jYWQxZDQwNi05M2FmNzJhYS1kNDM5OGI2MA==, workerId: [3:7593704291855162601:2546], local sessions count: 1 2026-01-10T12:10:57.561651Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MTU2OWMwMjItZDJkMmU4YTYtYmM0MTdmOGEtMjcwNTJhNGQ=, workerId: [3:7593704291855162434:2529], local sessions count: 0 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:58.669201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:58.669312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.669364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:58.669401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:58.669440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:58.669467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:58.669526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.669609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:58.670452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:58.670749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:58.758591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.758646Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.786119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:58.786219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:58.786398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:58.807670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:58.807898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:58.808668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:58.808886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:58.811065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.811232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:58.812372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:58.812429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.812499Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:58.812542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:58.812587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:58.812862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:58.970834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.972038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.980513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.980715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.980824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.980910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.980994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.981832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2:11:00.349111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.349307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.349368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.349490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.349712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.349835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.350397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.350749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.350914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.350986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.351165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.351233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.351294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:00.351706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:11:00.362501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:00.362795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:00.369073Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:935:2866], Recipient [1:935:2866]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:00.369148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:00.373140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:00.373247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:00.373807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:00.373886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:00.373958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:00.374010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:00.374290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:971:2866], Recipient [1:935:2866]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:00.374339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:00.374378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:935:2866] sender: [1:991:2058] recipient: [1:15:2062] 2026-01-10T12:11:00.425406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:990:2909], Recipient [1:935:2866]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:11:00.425483Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:00.425613Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:00.425946Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 213us result status StatusSuccess 2026-01-10T12:11:00.426558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 
38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:00.427359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:992:2910], Recipient [1:935:2866]: NKikimrPQ.TEvPeriodicTopicStats PathId: 38 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2026-01-10T12:11:00.427420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-10T12:11:00.427470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 19 UsedReserveSize 7 2026-01-10T12:11:00.427517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-10T12:11:00.427624Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:141: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2026-01-10T12:11:00.427940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:993:2911], Recipient [1:935:2866]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:11:00.427982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:00.428097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:00.428303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 215us result status StatusSuccess 2026-01-10T12:11:00.428894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:58.882467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:58.882566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-10T12:10:58.882630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:58.882681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:58.882724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:58.882758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:58.882829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.883143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:58.884182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:58.884531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:58.985681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.985749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:59.003588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:59.003715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:59.003923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:59.013414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:59.013680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:59.014450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:59.014713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:59.017208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.017411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:59.018690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:59.018755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.018826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-10T12:10:59.018877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:59.018924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:59.019227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:59.175597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.176682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.176828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.176978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.177863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard: 72057594046678944 2026-01-10T12:11:00.753151Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2026-01-10T12:11:00.753198Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-10T12:11:00.754539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:00.756783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:11:00.756823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:00.757070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:11:00.757426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:11:00.757460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:00.759866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:11:00.759998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:11:00.760152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [1:1315:3166], Recipient [1:695:2682]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1315:3166] ServerId: [1:1317:3168] } 2026-01-10T12:11:00.760192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T12:11:00.760221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:11:00.760740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:11:00.760780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:11:00.761313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1331:3182], Recipient [1:695:2682]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:11:00.761372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:11:00.761414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:11:00.761568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:920:2854], Recipient [1:695:2682]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2026-01-10T12:11:00.761606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:11:00.761670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:11:00.761766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:11:00.761807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1329:3180] 2026-01-10T12:11:00.762008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:1331:3182], Recipient [1:695:2682]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:11:00.762048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:11:00.762079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-10T12:11:00.762671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1332:3183], Recipient [1:695:2682]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:11:00.762715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:00.762865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:00.763085Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 271us result status StatusSuccess 2026-01-10T12:11:00.763556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } 
ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:00.764655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:1333:3184], Recipient [1:695:2682]: NKikimrPQ.TEvPeriodicTopicStats PathId: 40 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2026-01-10T12:11:00.764707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-10T12:11:00.764748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 40] DataSize 151 UsedReserveSize 151 2026-01-10T12:11:00.764809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-10T12:11:00.765313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1334:3185], Recipient [1:695:2682]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:11:00.765353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:00.765428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:00.767877Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 149us result status StatusSuccess 2026-01-10T12:11:00.768453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self 
{ Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:58.522743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:58.522832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.522900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:58.522934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:58.522967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:58.523003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:58.523080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.523160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:58.524150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:58.524430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:58.614218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.614275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.629568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:58.629679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:58.629861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:58.641915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:58.642166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:58.642758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:58.642964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:58.644982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.645139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:58.646062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:58.646120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.646181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:58.646224Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:58.646269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:58.646478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:58.785310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.786957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.787471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... oup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:10:59.934181Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:946:2869] connected; active server actors: 1 2026-01-10T12:10:59.957444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:10:59.957711Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 200us result status StatusSuccess 2026-01-10T12:10:59.958185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 
3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:00.635233Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-10T12:11:00.635334Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 1 2026-01-10T12:11:00.636157Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 0 2026-01-10T12:11:00.636245Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-10T12:11:00.636584Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2026-01-10T12:11:00.636677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-10T12:11:00.649653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:11:01.060617Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-10T12:11:01.060706Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2026-01-10T12:11:01.061574Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2026-01-10T12:11:01.061624Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-01-10T12:11:01.061858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-10T12:11:01.078021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:11:01.484689Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-10T12:11:01.484805Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-10T12:11:01.485730Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2026-01-10T12:11:01.485796Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-10T12:11:01.486059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-10T12:11:01.499319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:11:01.544972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:01.545251Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 329us result status StatusSuccess 2026-01-10T12:11:01.545779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:01.546615Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1021:2934] connected; active server actors: 1 2026-01-10T12:11:01.562747Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186233409547][Topic1] BALANCER INIT DONE for 
Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2026-01-10T12:11:01.563224Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-10T12:11:01.566255Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2026-01-10T12:11:01.590159Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1065:2968] connected; active server actors: 1 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [FAIL] Test command err: 2026-01-10T12:08:41.138903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703705883376983:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:41.139083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:41.700317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:41.701273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:41.701957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:41.710623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:41.911785Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:41.932219Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703705883376847:2081] 1768046921023254 != 1768046921023257 2026-01-10T12:08:41.932293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:41.953839Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:08:41.962229Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:08:42.109208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:42.134787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:42.134822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:42.134829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:42.134995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:42.494044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:42.554324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:42.568670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:08:45.476487Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:45.482017Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703723063246899:2322], Start check tables existence, number paths: 2 2026-01-10T12:08:45.482746Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:45.482785Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:45.485109Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NzQ5MDg0MDctNTIzMjQ1MmItZWVjOTY2MzYtZTBiYjJkMzI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NzQ5MDg0MDctNTIzMjQ1MmItZWVjOTY2MzYtZTBiYjJkMzI= temp_dir_name# 88c26d7e-4aba-de67-c2ae-0dbc103258eb trace_id# 2026-01-10T12:08:45.485342Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703723063246899:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:45.485432Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703723063246899:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:45.485464Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703723063246899:2322], Successfully finished 2026-01-10T12:08:45.485680Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:45.485720Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:45.485823Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} 
SessionId: ydb://session/3?node_id=1&id=NzQ5MDg0MDctNTIzMjQ1MmItZWVjOTY2MzYtZTBiYjJkMzI=, ActorId: [1:7593703723063246926:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:45.502705Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:45.508149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:45.511268Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:08:45.523344Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:45.535716Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:08:45.632206Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:45.715235Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703723063246979:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:08:45.715400Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703723063246928:2504], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:08:45.718301Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NmRjMTUyMi03NDBhZTczMS1kMDVkZGI1MS04Y2JlYWNiZg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NmRjMTUyMi03NDBhZTczMS1kMDVkZGI1MS04Y2JlYWNiZg== temp_dir_name# 178d34ab-4e8d-378e-d345-cca70225757f trace_id# 2026-01-10T12:08:45.718901Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:45.718914Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:08:45.718958Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-10T12:08:45.718987Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NmRjMTUyMi03NDBhZTczMS1kMDVkZGI1MS04Y2JlYWNiZg==, ActorId: [1:7593703723063246986:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:45.719209Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NmRjMTUyMi03NDBhZTczMS1kMDVkZGI1MS04Y2JlYWNiZg==, ActorId: [1:7593703723063246986:2327], ActorState: ReadyState, LegacyTraceId: 01kekwy5wq3n6nkeqx3taq1jre, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7593703723063246985:2541] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-10T12:08:45.719258Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7593703723063246986:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NmRjMTUyMi03NDBhZTczMS1kMDVkZGI1MS04Y2JlYWNiZg== 2026-01-10T12:08:45.719310Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703723063246988:2328], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:08:45.719383Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593703723063246989:2329], Database: /Root, Start database fetching 2026-01-10T12:08:45.721018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593703723063246988:2328], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:08:45.721074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-10T12:08:45.721091Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:08:45.721306Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7593703723063246999:2330], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:08:45.721360Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593703723063246989:2329], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-10T12:08:45.721402Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-10T12:08:45.721444Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [Workloa ... BlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:51.678449Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: ReadyState, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:10:51.678560Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: ReadyState, Created new KQP executer executer_id# [8:7593704262710272660:3428] is_rollback# true trace_id# 2026-01-10T12:10:51.678627Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:51.680941Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: CleanupState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:51.680991Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:51.681184Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZGVjY2I4OGQtMmZjMjBlOTYtZWZjN2Y0NGItOTBhNzM2MGY=, ActorId: [8:7593704262710272629:3428], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:10:51.698738Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: 
ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Acquire mvcc snapshot trace_id# 2026-01-10T12:10:51.704752Z node 8 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Read snapshot result status# UNAVAILABLE step# 1768047051715 tx_id# 18446744073709551615 trace_id# 2026-01-10T12:10:51.704845Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 137096823468824 trace_id# 2026-01-10T12:10:51.704898Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:10:51.705070Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Created new KQP executer executer_id# [8:7593704262710272668:3424] is_rollback# false trace_id# 2026-01-10T12:10:51.711190Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:10:51.711372Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, TxInfo status# Active kind# ReadOnly total_duration# 0 server_duration# 12.633 queries_count# 2 trace_id# 2026-01-10T12:10:51.711505Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:10:51.711936Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:51.711976Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, EndCleanup is_final# false trace_id# 2026-01-10T12:10:51.712052Z node 8 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ExecuteState, LegacyTraceId: 01kekx20nn5bdrxfpan7r7r449, Sent query response back to proxy proxy_request_id# 217 proxy_id# [8:7593703996422295933:2265] trace_id# 2026-01-10T12:10:51.713083Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:51.713169Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ReadyState, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:10:51.713284Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ReadyState, Created new KQP executer executer_id# [8:7593704262710272679:3424] is_rollback# true trace_id# 2026-01-10T12:10:51.713356Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:51.714158Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: CleanupState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:51.714204Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:51.714384Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NjY3ZGZkZjQtMWJiNWM4YjUtNmRiMTNmN2YtYTQxZWEwMjY=, ActorId: [8:7593704262710272623:3424], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:10:52.010833Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=NGE5MTFlZTgtZDUwMDczOTctZWYzYTUxMzctMWM2Y2U4ZDA=, ActorId: [8:7593704022192100328:2325], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:10:52.010910Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=NGE5MTFlZTgtZDUwMDczOTctZWYzYTUxMzctMWM2Y2U4ZDA=, ActorId: [8:7593704022192100328:2325], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:10:52.010950Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=NGE5MTFlZTgtZDUwMDczOTctZWYzYTUxMzctMWM2Y2U4ZDA=, ActorId: [8:7593704022192100328:2325], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:10:52.010988Z node 8 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=NGE5MTFlZTgtZDUwMDczOTctZWYzYTUxMzctMWM2Y2U4ZDA=, ActorId: [8:7593704022192100328:2325], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:10:52.011082Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=NGE5MTFlZTgtZDUwMDczOTctZWYzYTUxMzctMWM2Y2U4ZDA=, ActorId: [8:7593704022192100328:2325], ActorState: unknown state, Session actor destroyed trace_id# assertion failed at ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:778, static void NKikimr::NKqp::NWorkload::IYdbSetup::WaitFor(TDuration, TString, std::function): (false) Waiting Resource pool classifier success timeout. Spent time 20.311962s exceeds limit 20.000000s 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B81ECEB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BD0FD0B 2. /tmp//-S/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:778: WaitFor @ 0x38D37112 3. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:828: WaitForSuccess @ 0x1B38C2D6 4. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:911: Execute_ @ 0x1B391918 5. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756: operator() @ 0x1B48CAF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756:1) &> @ 0x1B48CAF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756:1) &> @ 0x1B48CAF7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B48CAF7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B48CAF7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BD3F149 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BD3F149 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BD3F149 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BD169E7 14. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756: Execute @ 0x1B48BE33 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BD1819F 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BD38FAC 17. ??:0: ?? @ 0x7FF058636D8F 18. ??:0: ?? @ 0x7FF058636E3F 19. ??:0: ?? 
@ 0x18A2E028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:59.364985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:59.365070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.365145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:59.365187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:59.365226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:59.365255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:59.365317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.365413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:59.366285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:59.366857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:59.459154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:59.459207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:59.476137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:59.476245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:59.476592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:59.486079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:59.486340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:59.487154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-10T12:10:59.487396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:59.489718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.489918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:59.491185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:59.491257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.491322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:59.491370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:59.491416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:59.491708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:59.671331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.672999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.673599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.380231Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-10T12:11:02.422199Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:11:02.422274Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422306Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.422332Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422354Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-10T12:11:02.422418Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:11:02.422433Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422448Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.422462Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422480Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-10T12:11:02.422608Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:02.422636Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 
0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.422681Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.422706Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-10T12:11:02.453725Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:11:02.453817Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.453858Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.453894Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.453924Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-10T12:11:02.454018Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:11:02.454043Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.454065Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.454090Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.454133Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-10T12:11:02.454189Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:02.454213Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.454239Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.454277Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.454299Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-10T12:11:02.489090Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-10T12:11:02.489182Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-10T12:11:02.489630Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-10T12:11:02.489763Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: 
[72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-10T12:11:02.489838Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-10T12:11:02.490342Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2026-01-10T12:11:02.490403Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-01-10T12:11:02.490706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 16975298 2026-01-10T12:11:02.508863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:11:02.530096Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:11:02.530154Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530195Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.530231Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530257Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-10T12:11:02.530308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:11:02.530328Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530343Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.530362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530377Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-10T12:11:02.530404Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:02.530421Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530438Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:02.530464Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:02.530497Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-10T12:11:02.551952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:02.552246Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 341us result status StatusSuccess 2026-01-10T12:11:02.552824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> 
TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> ColumnShardTiers::TTLUsage >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthLower |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:59.532396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:59.532487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.532535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:59.532585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:59.532624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:59.532654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:59.532727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.532829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:59.533670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:59.533934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:59.625640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:59.625701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:59.640451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:59.640551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:59.640717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:59.654236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:59.654512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:59.655282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:59.655524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:59.658083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.658252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:59.659410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:59.659475Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.659551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:59.659598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:59.659644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:59.659917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:59.827327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } 
} TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.828946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.829643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... thId: 37] was 0 2026-01-10T12:11:05.075379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2026-01-10T12:11:05.075626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.075743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:05.075787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:05.075939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:05.076181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:05.076537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-10T12:11:05.076948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.077079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.077576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.077670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read 
records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.078750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.079289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.079618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.079728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.079790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.079970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.080038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.080125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:05.088325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:05.094563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:05.094660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:05.095987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:05.096060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:05.096145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:05.098346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1163:3116] sender: [1:1218:2058] recipient: [1:15:2062] 2026-01-10T12:11:05.143149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:05.143475Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 365us result status StatusSuccess 2026-01-10T12:11:05.144004Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:05.146629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:05.146885Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 284us result status StatusSuccess 2026-01-10T12:11:05.147445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleConfigTests::TestAffectedConfigs [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2026-01-10T12:10:27.696911Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:27.697043Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:27.984721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:29.987858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:29.987955Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:30.105790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:31.828809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:31.828893Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:31.977716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:33.539241Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:33.539313Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:33.683828Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:35.024411Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:10:35.024500Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:35.144575Z node 4 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-10T12:10:35.957850Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:35.957940Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:36.107888Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:37.251052Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console 
configs 2026-01-10T12:10:37.251147Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:37.352331Z node 5 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[5:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-10T12:10:37.994362Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:37.994438Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:38.138306Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:42.760998Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:42.761109Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:42.884458Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:46.913801Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:46.913883Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:47.016391Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:48.730422Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:48.730503Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:48.854649Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:50.475092Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:50.475182Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:50.585577Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:52.109276Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:52.109365Z node 11 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:52.228858Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:53.669089Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:53.669176Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:53.785677Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:55.204857Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:55.204927Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:55.334532Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:56.748994Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:56.749082Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:56.853699Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:58.098241Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.098395Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.213241Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:59.505304Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:59.505398Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:59.636582Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:00.917864Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to 
console configs 2026-01-10T12:11:00.917959Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:01.037982Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:02.270502Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:02.270593Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:02.420268Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:04.109758Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:04.109854Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:04.231154Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:05.515647Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:05.515744Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:05.620403Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:58.631956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:58.632052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.632123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:58.632170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:58.632232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:58.632270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:58.632369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:58.632470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:58.633382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:58.633695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:58.728333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:58.728397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.743781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:58.743912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:58.744103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:58.757997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:58.758263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:58.759093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:58.759361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:58.761843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.762043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:58.763175Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:58.763244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.763322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:58.763374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:58.763422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:58.763692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:58.893054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.896708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.896894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.896993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.897898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.898007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T12:11:06.920931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-10T12:11:06.921369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.921538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.922932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.923158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.923267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.923788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924518Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.924651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:06.925060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:11:06.934215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:06.934434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:06.939003Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:1425:3369], Recipient [1:1425:3369]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:06.939067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:06.940167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:06.940240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:06.940588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1425:3369], Recipient [1:1425:3369]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:06.940639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:06.940823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:06.940867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:06.940909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:06.940959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:06.941194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1461:3369], Recipient [1:1425:3369]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:06.941224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:06.941252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1425:3369] sender: [1:1480:2058] recipient: [1:15:2062] 2026-01-10T12:11:06.980300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received 
event# 271122945, Sender [1:1479:3412], Recipient [1:1425:3369]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:11:06.980392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:06.980536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:11:06.980839Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 286us result status StatusSuccess 2026-01-10T12:11:06.981672Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> TableCreator::CreateTables >> 
TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink >> TGRpcStreamingTest::ClientNeverWrites >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:59.267494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:59.267595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.267644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:59.267701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:59.267779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:59.267815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:59.267905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:59.268011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:59.269019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:59.269321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:59.360792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:59.360847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:59.377872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:59.378011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:59.378189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:59.386601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:59.386812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:10:59.387544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:59.387788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:59.390947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.391132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:59.392219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:59.392301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:59.392383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:59.392420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:59.392458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:59.392753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:59.554361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.555567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.555723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.555834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.555939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:59.556861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2026-01-10T12:11:09.486082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.486237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.486667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.486750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.487827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.488420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.488758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.488852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.488913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.489069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.489132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.489236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:09.489643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:11:09.496912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:09.497118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:09.502183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:1535:3467], 
Recipient [1:1535:3467]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:09.502251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:09.504744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:09.504840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:09.505328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1535:3467], Recipient [1:1535:3467]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:09.505392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:09.506195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:09.506270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:09.506339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:09.506377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:09.508363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1571:3467], Recipient [1:1535:3467]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:09.508422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:09.508473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1535:3467] sender: [1:1592:2058] recipient: [1:15:2062] 2026-01-10T12:11:09.557980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1591:3512], Recipient [1:1535:3467]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:11:09.558089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:09.558255Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:11:09.558625Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 362us result status StatusSuccess 2026-01-10T12:11:09.559584Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 18801 Memory: 141504 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestListTenants >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::SimpleEcho >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2026-01-10T12:10:29.644221Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006849s 2026-01-10T12:10:29.704227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704169685615078:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:29.716352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:29.935142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:30.013754Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012536s 2026-01-10T12:10:30.058119Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704168525976623:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:30.064896Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:30.125695Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704175253333242:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:30.125800Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:30.248865Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593704172598653329:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:30.258400Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:30.938254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:31.035311Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:31.056627Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:31.264146Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:31.359827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.360018Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:31.392950Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.393213Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:31.393928Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.393990Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:31.396211Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.396339Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:31.408268Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:31.504066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.504152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:31.600692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.600875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.629118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.631671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.636878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.636928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.637041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.637061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.637152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.637181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.644789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.774314Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:10:31.774362Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:10:31.774377Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:10:31.774394Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:10:31.796315Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.802032Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.802308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.802430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.806461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:31.811387Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.811420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.811707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.861744Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.876047Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:32.238819Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:32.221069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:32.376593Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:32.748102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:34.676219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704169685615078:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:34.676341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:34.968499Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704168525976623:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:34.968566Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:35.120224Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593704175253333242:2155];send_to=[0:7307199536658146131:77625 ... :7307199536658146131:7762515]; 2026-01-10T12:10:56.739419Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:56.752663Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:56.752783Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:56.756244Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-10T12:10:56.757320Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:56.789271Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:56.855187Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855357Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855441Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855550Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855625Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855733Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855815Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855891Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.855977Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:10:56.877337Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-10T12:10:56.881156Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:56.881266Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:56.888101Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:10:56.987606Z node 10 :STATISTICS WARN: tx_init.cpp:298: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false 2026-01-10T12:10:57.039855Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:57.135622Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:57.269513Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:10:57.274522Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:57.274547Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:57.274556Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:57.274650Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:57.422598Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:57.612730Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7593704290209243244:3008], Database: /Root/test-serverless, Start database fetching 2026-01-10T12:10:57.613014Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7593704290209243244:3008], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2026-01-10T12:10:57.745699Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:00.913346Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:11:00.914001Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:11:00.914020Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:11:00.914363Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7593704302437845877:2358], Start check tables existence, number paths: 2 2026-01-10T12:11:00.915715Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:11:00.916480Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7593704302437845877:2358], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:11:00.916552Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7593704302437845877:2358], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:11:00.916587Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: 
table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7593704302437845877:2358], Successfully finished 2026-01-10T12:11:00.916628Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:11:01.127868Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593704285257975703:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:01.127952Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:01.360941Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:11:01.362324Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7593704307389112514:2389], Start check tables existence, number paths: 2 2026-01-10T12:11:01.363153Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:11:01.363190Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:11:01.364306Z node 10 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:11:01.364527Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7593704307389112514:2389], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:11:01.364590Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7593704307389112514:2389], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:11:01.364624Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7593704307389112514:2389], Successfully finished 2026-01-10T12:11:01.364707Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:11:01.736120Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7593704285914274470:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:01.736237Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:06.816324Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:06.816361Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:07.621034Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-10T12:11:07.621719Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:11:07.629651Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing 
node 10 2026-01-10T12:11:07.630065Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:11:07.655446Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=9&id=NDAxYzA4MjktOWI3NTUzMS0xMDEyZTg4NC1mMzkzNWY1Yw==, ActorId: [9:7593704285853678094:2332], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:11:07.655588Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=9&id=NDAxYzA4MjktOWI3NTUzMS0xMDEyZTg4NC1mMzkzNWY1Yw==, ActorId: [9:7593704285853678094:2332], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:11:07.655789Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=9&id=NDAxYzA4MjktOWI3NTUzMS0xMDEyZTg4NC1mMzkzNWY1Yw==, ActorId: [9:7593704285853678094:2332], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:11:07.656052Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=9&id=NDAxYzA4MjktOWI3NTUzMS0xMDEyZTg4NC1mMzkzNWY1Yw==, ActorId: [9:7593704285853678094:2332], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:11:07.656641Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=9&id=NDAxYzA4MjktOWI3NTUzMS0xMDEyZTg4NC1mMzkzNWY1Yw==, ActorId: [9:7593704285853678094:2332], ActorState: unknown state, Session actor destroyed trace_id# |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> BasicUsage::RecreateObserver [GOOD] >> TGRpcStreamingTest::ReadFinish >> TestMalformedRequest::CompressedGzipContentLengthLower [GOOD] >> IncrementalBackup::IncrementalBackupMultipleIndexes [GOOD] >> IncrementalBackup::IndexDataVerificationIncrementalRestore >> TGRpcStreamingTest::ClientDisconnects >> TestMalformedRequest::CompressedGzipContentLengthHigher >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> TableCreator::CreateTables [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TestMalformedRequest::ContentLengthLower >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2026-01-10T12:09:14.685585Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1768046954685549 2026-01-10T12:09:15.262255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703849735405652:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:15.262322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:15.270396Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.013439s 2026-01-10T12:09:15.352421Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.471605Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.915891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:15.916007Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:15.922074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.216018Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.292497Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.311387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.311523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:16.490210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.490313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.512450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.512561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.617500Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:16.617775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.629309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.652838Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.785547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:09:16.801876Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:16.834546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.287875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002299/r3tmp/yandex137q9F.tmp 2026-01-10T12:09:17.287912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002299/r3tmp/yandex137q9F.tmp 2026-01-10T12:09:17.288192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002299/r3tmp/yandex137q9F.tmp 2026-01-10T12:09:17.288293Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:17.396993Z INFO: TTestServer started on Port 27376 GrpcPort 31008 PQClient connected to localhost:31008 2026-01-10T12:09:17.892377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:20.264218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703849735405652:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:20.264304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:21.522664Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877851706360:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.522693Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877851706352:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.522807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.524889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703877851706367:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.524980Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:21.532227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:21.561689Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703877851706366:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:09:21.686283Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703877851706396:2146] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:21.872111Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.122304s 2026-01-10T12:09:21.872330Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.122434s 2026-01-10T12:09:21.910331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:21.965291Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703875505210718:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:21.966300Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjQ3MDg5YTQtZTU1NmRmZTgtNzNiZWM1ZjgtYzZlODM0NDE=, ActorId: [1:7593703875505210657:2330], ActorState: ExecuteState, LegacyTraceId: 01kekwz8wh3k1qepwhpe776a6q, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:21.968716Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:09:21.970082Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703877851706411:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:21.976299Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTJjMjJjZW ... 3704339599844808:2494] disconnected; active server actors: 1 2026-01-10T12:11:09.809080Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844808:2494] client user disconnected session shared/user_3_1_13312259751160945055_v1 2026-01-10T12:11:09.809136Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-10T12:11:09.809188Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnreadableFamilies=0 [], RequireBalancing=0 [] 2026-01-10T12:11:09.809223Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2026-01-10T12:11:09.809251Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000045s 2026-01-10T12:11:09.811332Z :INFO: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Closing read session. Close timeout: 0.000000s 2026-01-10T12:11:09.811389Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:11:09.811427Z :INFO: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Counters: { Errors: 0 CurrentSessionLifetimeMs: 46 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:11:09.811497Z :NOTICE: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:11:09.811531Z :DEBUG: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] [] Abort session to cluster 2026-01-10T12:11:09.813010Z :INFO: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Closing read session. Close timeout: 0.000000s 2026-01-10T12:11:09.813078Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2026-01-10T12:11:09.813115Z :INFO: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 45 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:11:09.813177Z :NOTICE: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:11:09.813213Z :DEBUG: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] [] Abort session to cluster 2026-01-10T12:11:09.813335Z :INFO: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Closing read session. Close timeout: 0.000000s 2026-01-10T12:11:09.813376Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2026-01-10T12:11:09.813428Z :INFO: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 46 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:11:09.813498Z :NOTICE: [/Root] [/Root] [8c7ff8c4-6a5eaba8-92532aa4-bbdfdfb0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:11:09.813638Z :INFO: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Closing read session. Close timeout: 0.000000s 2026-01-10T12:11:09.813662Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:11:09.813687Z :INFO: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Counters: { Errors: 0 CurrentSessionLifetimeMs: 48 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:11:09.813724Z :NOTICE: [/Root] [/Root] [f54700b7-9189b60a-4d3e68f7-d67d9905] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:11:09.812418Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_17855370176879303312_v1 grpc read done: success# 0, data# { } 2026-01-10T12:11:09.812441Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_17855370176879303312_v1 grpc read failed 2026-01-10T12:11:09.812472Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 2 consumer shared/user session shared/user_3_2_17855370176879303312_v1 closed 2026-01-10T12:11:09.812910Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_17855370176879303312_v1 is DEAD 2026-01-10T12:11:09.814525Z :INFO: [/Root] [/Root] [2bef96cb-aef4371b-4d3eb105-a8bd875a] Closing read session. Close timeout: 0.000000s 2026-01-10T12:11:09.814555Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-10T12:11:09.814583Z :INFO: [/Root] [/Root] [2bef96cb-aef4371b-4d3eb105-a8bd875a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 53 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:11:09.814633Z :NOTICE: [/Root] [/Root] [2bef96cb-aef4371b-4d3eb105-a8bd875a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:11:09.814227Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_5894221301362923122_v1 grpc read done: success# 0, data# { } 2026-01-10T12:11:09.814235Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844807:2495] disconnected. 2026-01-10T12:11:09.814243Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_5894221301362923122_v1 grpc read failed 2026-01-10T12:11:09.814253Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844807:2495] disconnected; active server actors: 1 2026-01-10T12:11:09.814261Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 3 consumer shared/user session shared/user_3_3_5894221301362923122_v1 closed 2026-01-10T12:11:09.814263Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844807:2495] client user disconnected session shared/user_3_2_17855370176879303312_v1 2026-01-10T12:11:09.814288Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-10T12:11:09.814312Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_5894221301362923122_v1 is DEAD 2026-01-10T12:11:09.814355Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnreadableFamilies=0 [], RequireBalancing=0 [] 2026-01-10T12:11:09.814382Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-10T12:11:09.814403Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000030s 2026-01-10T12:11:09.814620Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844822:2499] disconnected. 
2026-01-10T12:11:09.814638Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844822:2499] disconnected; active server actors: 1 2026-01-10T12:11:09.814969Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_5894221301362923122_v1 2026-01-10T12:11:09.814652Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7593704339599844822:2499] client user disconnected session shared/user_3_3_5894221301362923122_v1 2026-01-10T12:11:09.815007Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7593704339599844830:2506] destroyed 2026-01-10T12:11:09.815076Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_3_5894221301362923122_v1 2026-01-10T12:11:09.828634Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:09.828675Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:09.828690Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:09.828706Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:09.828716Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:09.928654Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:09.928699Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:09.928715Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:09.928738Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:09.928753Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:10.029053Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:10.029095Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:10.029114Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:10.029145Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:10.029161Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:10.413044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:10.413080Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |86.9%| [TM] {BAZEL_UPLOAD} 
ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2026-01-10T12:11:09.587369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704339460028821:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:09.587722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:09.800271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:09.802640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:09.802778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:09.805418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:09.911277Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704339460028716:2081] 1768047069577907 != 1768047069577910 2026-01-10T12:11:09.911800Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:10.006123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:10.099325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:10.099347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:10.099354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:10.099439Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:10.146486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:10.156170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:10.157778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:10.589201Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.9%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |86.9%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TConsoleTests::TestListTenants [GOOD] >> TestMalformedRequest::ContentLengthNone >> TGRpcStreamingTest::ReadFinish [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2026-01-10T12:11:10.975044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704345339794158:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:10.975139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:11.217459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:11.221087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:11.295786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:11.322106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:11.322968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:11.344260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704345339794126:2081] 
1768047070973739 != 1768047070973742 2026-01-10T12:11:11.416845Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7dbce676f880] stream accepted Name# Session ok# true peer# ipv6:[::1]:47514 2026-01-10T12:11:11.417568Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7dbce676f880] facade attach Name# Session actor# [1:7593704349634762077:2368] peer# ipv6:[::1]:47514 2026-01-10T12:11:11.417600Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7dbce676f880] facade read Name# Session peer# ipv6:[::1]:47514 2026-01-10T12:11:11.417684Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7dbce676f880] facade write Name# Session data# peer# ipv6:[::1]:47514 2026-01-10T12:11:11.418070Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7dbce676f880] facade finish Name# Session peer# ipv6:[::1]:47514 grpc status# (0) message# 2026-01-10T12:11:11.418080Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7dbce676f880] write finished Name# Session ok# true peer# ipv6:[::1]:47514 2026-01-10T12:11:11.418099Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2026-01-10T12:11:11.418417Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7dbce676f880] read finished Name# Session ok# false data# peer# ipv6:[::1]:47514 2026-01-10T12:11:11.418459Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7dbce676f880] stream done notification Name# Session ok# true peer# ipv6:[::1]:47514 2026-01-10T12:11:11.418496Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7dbce676f880] stream finished Name# Session ok# true peer# ipv6:[::1]:47514 grpc status# (0) message# 2026-01-10T12:11:11.418549Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7dbce676f880] deregistering request Name# Session peer# ipv6:[::1]:47514 (finish done) 2026-01-10T12:11:11.418811Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2026-01-10T12:11:11.418851Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2026-01-10T12:11:11.567554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> TConsoleTests::TestListTenantsExtSubdomain >> JsonProtoConversion::JsonToProtoMap [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2026-01-10T12:11:11.241905Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704350660992359:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:11.241965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:11.266877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown 2026-01-10T12:11:11.511774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:11.514413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:11.514582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:11.517710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:11.634996Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:11.648296Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704350660992107:2081] 1768047071217510 != 1768047071217513 2026-01-10T12:11:11.680012Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d35f487f480] stream accepted Name# Session ok# true peer# ipv6:[::1]:59152 2026-01-10T12:11:11.682174Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d35f487f480] facade attach Name# Session actor# [1:7593704350660992743:2353] peer# ipv6:[::1]:59152 2026-01-10T12:11:11.682214Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d35f487f480] facade read Name# Session peer# ipv6:[::1]:59152 2026-01-10T12:11:11.682406Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d35f487f480] read finished Name# Session ok# true data# peer# ipv6:[::1]:59152 2026-01-10T12:11:11.683368Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2026-01-10T12:11:11.683405Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d35f487f480] facade write Name# Session data# peer# ipv6:[::1]:59152 2026-01-10T12:11:11.683662Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d35f487f480] facade finish Name# Session peer# ipv6:[::1]:59152 grpc status# (0) message# 2026-01-10T12:11:11.683743Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d35f487f480] write finished Name# Session ok# true peer# ipv6:[::1]:59152 2026-01-10T12:11:11.684007Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d35f487f480] stream done notification Name# Session ok# true peer# ipv6:[::1]:59152 2026-01-10T12:11:11.684055Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d35f487f480] stream finished Name# Session ok# true peer# ipv6:[::1]:59152 grpc status# (0) message# 2026-01-10T12:11:11.684125Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d35f487f480] deregistering request Name# Session peer# ipv6:[::1]:59152 (finish done) 2026-01-10T12:11:11.770427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2026-01-10T12:11:11.248917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704348580871812:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:11.249000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:11.622529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:11.622649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:11.676465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:11.700775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:11.760853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:11.815297Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d9326c9fa80] stream accepted Name# Session ok# true peer# ipv6:[::1]:33754 2026-01-10T12:11:11.815695Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d9326c9fa80] facade attach Name# Session actor# [1:7593704348580872504:2462] peer# ipv6:[::1]:33754 2026-01-10T12:11:11.815731Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d9326c9fa80] facade read Name# Session peer# ipv6:[::1]:33754 2026-01-10T12:11:11.815898Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d9326c9fa80] read finished Name# Session ok# false data# peer# ipv6:[::1]:33754 2026-01-10T12:11:11.816154Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2026-01-10T12:11:11.816253Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d9326c9fa80] facade finish Name# Session peer# ipv6:[::1]:33754 grpc status# (9) message# Everything is A-OK 2026-01-10T12:11:11.816790Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d9326c9fa80] stream done notification Name# Session ok# true peer# ipv6:[::1]:33754 2026-01-10T12:11:11.816917Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone 2026-01-10T12:11:11.816935Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d9326c9fa80] stream finished Name# Session ok# true peer# ipv6:[::1]:33754 grpc status# (9) message# Everything is A-OK 2026-01-10T12:11:11.817005Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d9326c9fa80] deregistering request Name# Session peer# ipv6:[::1]:33754 (finish done) 2026-01-10T12:11:11.956177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2026-01-10T12:11:11.161776Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704350489748924:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:11.163026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:11.411815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:11.435461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:11.435597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:11.515443Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704350489748886:2081] 1768047071158906 != 1768047071158909 2026-01-10T12:11:11.525220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:11.529196Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:11.630573Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d783a512c80] stream accepted Name# Session ok# true peer# ipv6:[::1]:42066 2026-01-10T12:11:11.632710Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d783a512c80] facade attach Name# Session actor# [1:7593704350489749550:2377] peer# ipv6:[::1]:42066 2026-01-10T12:11:11.632739Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d783a512c80] facade write Name# Session data# peer# ipv6:[::1]:42066 2026-01-10T12:11:11.633042Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x7d783a512c80] facade write Name# Session data# peer# ipv6:[::1]:42066 grpc status# (0) message# 2026-01-10T12:11:11.633066Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d783a512c80] write finished Name# Session ok# true peer# ipv6:[::1]:42066 2026-01-10T12:11:11.633442Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d783a512c80] stream done notification Name# Session ok# true peer# ipv6:[::1]:42066 2026-01-10T12:11:11.633490Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d783a512c80] write finished Name# Session ok# true peer# ipv6:[::1]:42066 2026-01-10T12:11:11.633517Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d783a512c80] stream finished Name# Session ok# true peer# ipv6:[::1]:42066 grpc status# (0) message# 2026-01-10T12:11:11.633749Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d783a512c80] deregistering request Name# Session peer# ipv6:[::1]:42066 (finish done) 2026-01-10T12:11:11.635143Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-01-10T12:11:11.635152Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-01-10T12:11:11.649816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |86.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2026-01-10T12:11:12.245681Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704355467990206:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:12.248335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:12.462654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:12.549027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704355467990179:2081] 1768047072244106 != 1768047072244109 2026-01-10T12:11:12.558279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:12.573048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:12.573195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:12.582160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:12.654724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:12.663132Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d2be5411e80] stream accepted Name# Session ok# true peer# ipv6:[::1]:34170 2026-01-10T12:11:12.663467Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d2be5411e80] facade attach Name# Session actor# [1:7593704355467990945:2464] peer# ipv6:[::1]:34170 2026-01-10T12:11:12.663519Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d2be5411e80] facade read Name# Session peer# ipv6:[::1]:34170 2026-01-10T12:11:12.663590Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d2be5411e80] facade finish Name# Session peer# ipv6:[::1]:34170 grpc status# (0) message# 2026-01-10T12:11:12.664031Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d2be5411e80] stream done notification Name# Session ok# true peer# ipv6:[::1]:34170 2026-01-10T12:11:12.664033Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d2be5411e80] read finished Name# Session ok# false data# peer# ipv6:[::1]:34170 2026-01-10T12:11:12.664098Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d2be5411e80] stream finished Name# Session ok# true peer# ipv6:[::1]:34170 grpc status# (0) message# 2026-01-10T12:11:12.664140Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d2be5411e80] deregistering request Name# Session peer# ipv6:[::1]:34170 (finish done) 2026-01-10T12:11:12.664141Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 >> TSchemeShardViewTest::CreateView >> TestMalformedRequest::CompressedDeflateContentLengthLower |86.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |86.9%| [TM] 
{BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |86.9%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardViewTest::EmptyName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2026-01-10T12:11:12.999233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704353955726700:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:12.999834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:13.240008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:13.244694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:13.244830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:13.315946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:13.329152Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:13.332319Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704353955726670:2081] 1768047072997385 != 1768047072997388 2026-01-10T12:11:13.439243Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d44b0b8fe80] stream done notification Name# Session ok# true peer# ipv6:[::1]:44904 2026-01-10T12:11:13.439320Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d44b0b8fe80] stream accepted Name# Session ok# true peer# ipv6:[::1]:44904 2026-01-10T12:11:13.439832Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d44b0b8fe80] facade attach Name# Session actor# [1:7593704358250694711:2458] peer# ipv6:[::1]:44904 2026-01-10T12:11:13.439871Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2026-01-10T12:11:13.440271Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d44b0b8fe80] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2026-01-10T12:11:13.440365Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d44b0b8fe80] deregistering request Name# Session peer# unknown (finish done) 2026-01-10T12:11:13.444159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 |87.1%| [TM] {BAZEL_UPLOAD} 
ydb/core/grpc_streaming/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 |87.1%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:17.215616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:17.215717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:17.215763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:17.215802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:17.215835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:17.215865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:17.215922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:17.215998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:17.216978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:17.217267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:17.311083Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:17.311150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:17.328866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:17.328983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:17.329224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:17.337065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:17.337320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:17.338097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:17.338318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:17.340592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:17.340797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:17.342014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:17.342078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:17.342138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:17.342183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:17.342284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:17.342554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:17.482166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.482946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483047Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.483756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-10T12:11:18.180859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-10T12:11:18.180982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-10T12:11:18.181333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:18.181474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:18.181538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-10T12:11:18.181646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-10T12:11:18.181808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:18.181858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:11:18.183591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:18.183641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:18.183827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:11:18.183928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:18.183971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:11:18.184007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:11:18.184101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2026-01-10T12:11:18.184145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:11:18.184235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:18.184260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:18.184295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:18.184351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:18.184390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:11:18.184436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:18.184485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:11:18.184529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:11:18.184595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:11:18.184631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:11:18.184674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:11:18.184694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:11:18.185697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:18.185803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:18.185850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:18.185924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:11:18.185970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:18.186700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:18.186773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:18.186806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:18.186827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:11:18.186852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:18.186898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:11:18.189347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:11:18.189483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:11:18.189631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:11:18.189660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:11:18.189962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:11:18.190027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:11:18.190057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:710:2700] TestWaitNotification: OK eventTxId 101 2026-01-10T12:11:18.190382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:18.190536Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 156us result status StatusSuccess 2026-01-10T12:11:18.190826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:17.492404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:17.492491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:17.492538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:17.492593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:17.492654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:17.492684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:17.492759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:17.492838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:17.493679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:17.493996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:17.578074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:17.578134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:17.592924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:17.593031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:17.593294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:17.601822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:17.602049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:17.602593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:17.602757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:17.606421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:17.606575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:17.607502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:17.607553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:17.607597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:17.607630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:17.607707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:17.607917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-10T12:11:17.737952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.738736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.738852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.738914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.738962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:17.739518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
irst GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:18.484396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:18.484610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:11:18.484929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:11:18.485016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:11:18.485066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:11:18.485116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:11:18.490622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:11:18.490742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:11:18.490798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:11:18.493610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:11:18.493691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:11:18.493784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:11:18.493848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:11:18.494047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:11:18.496427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:11:18.496610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:11:18.497144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:18.497307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:18.497362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:11:18.497730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:11:18.497808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:11:18.498002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:18.498107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:18.501011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:18.501112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:18.501389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:18.501466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:11:18.501861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:11:18.501919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:11:18.502036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:11:18.502077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:11:18.502130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:11:18.502176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:11:18.502220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:11:18.502282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2026-01-10T12:11:18.502340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:11:18.502398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:11:18.502490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:18.502536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:11:18.502603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:11:18.503507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:11:18.503642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:11:18.503699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:11:18.503762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:11:18.503822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:18.503950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:11:18.508449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:11:18.509128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:11:18.509864Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:11:18.511298Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:11:18.515267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:11:18.515518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2026-01-10T12:11:18.515646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, 
viewDescription: Name: "" QueryText: "Some query" 2026-01-10T12:11:18.515812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-10T12:11:18.516898Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:11:18.521458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:18.521756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2026-01-10T12:11:18.522449Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::AsyncDropSameView |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit >> TestMalformedRequest::ContentLengthLower [GOOD] |87.1%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> TSchemeShardViewTest::ReadOnlyMode |87.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart >> TestMalformedRequest::ContentLengthHigher >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestMergeConfig >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:19.782003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:19.782094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.782132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:19.782194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:19.782248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:19.782278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:19.782354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.782446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-10T12:11:19.783366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:19.783673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:19.857810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:19.857881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:19.874595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:19.874713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:19.874961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:19.890678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:19.890981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:19.891827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:19.892119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:19.896252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.896484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:19.897755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:19.897816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.897899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:19.897944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:19.898051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:19.898327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.047351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.048944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:20.822744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:11:20.824631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:20.824678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:20.824862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:11:20.825003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:20.825058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:11:20.825109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:11:20.825369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.825416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:11:20.825503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:11:20.825534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:11:20.825571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:11:20.825598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:11:20.825643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:11:20.825696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:11:20.825729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:11:20.825778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:11:20.825841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:11:20.825876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:11:20.825904Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:11:20.825933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:11:20.826584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:11:20.826675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:11:20.826721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:11:20.826766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:11:20.826805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:20.827415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:11:20.827499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:11:20.827525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:11:20.827568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:11:20.827596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:20.827679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:11:20.828120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:11:20.828172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:11:20.828236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:20.831135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:11:20.831381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:11:20.832225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2026-01-10T12:11:20.832560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:11:20.832603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:11:20.832712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:11:20.832747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-10T12:11:20.832797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:11:20.832832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:11:20.833300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:11:20.833451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.833485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:738:2728] 2026-01-10T12:11:20.833646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:11:20.833741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:11:20.833777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.833819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:738:2728] 2026-01-10T12:11:20.833911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.833932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:738:2728] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 
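The trace above ends with the test's wait-for-notification handshake: for each txId (102, 103, 104) the subscriber sends EvNotifyTxCompletion, the schemeshard logs "unknown transaction" because the operation has already finished, and an EvNotifyTxCompletionResult is delivered straight back so the waiter is satisfied without blocking. A minimal standalone sketch of that idempotent pattern follows (plain C++; the class and type names here are invented for illustration and are not YDB's actual test framework):

// Standalone sketch (not YDB code): the idempotent "wait for tx completion"
// handshake visible in the trace above. If the transaction is already done,
// the subscriber is answered immediately instead of being parked.
#include <cstdint>
#include <functional>
#include <iostream>
#include <unordered_map>
#include <unordered_set>
#include <vector>

using TTxId = std::uint64_t;
using TWaiter = std::function<void(TTxId)>;

class TCompletionRegistry {
public:
    // Called when a transaction finishes; wakes up anyone already waiting.
    void MarkComplete(TTxId txId) {
        Completed.insert(txId);
        auto it = Waiters.find(txId);
        if (it == Waiters.end()) return;
        for (auto& waiter : it->second) waiter(txId);
        Waiters.erase(it);
    }

    // Mirrors the handshake above: an already-finished ("unknown") transaction
    // is answered right away, otherwise the waiter is parked until completion.
    void Subscribe(TTxId txId, TWaiter waiter) {
        if (Completed.count(txId)) {
            std::cout << "txId " << txId << " already complete, notify immediately\n";
            waiter(txId);
            return;
        }
        Waiters[txId].push_back(std::move(waiter));
    }

private:
    std::unordered_set<TTxId> Completed;
    std::unordered_map<TTxId, std::vector<TWaiter>> Waiters;
};

int main() {
    TCompletionRegistry registry;
    registry.MarkComplete(102);  // tx finished before anyone subscribed
    registry.Subscribe(102, [](TTxId id) { std::cout << "waiter satisfied for " << id << "\n"; });
    registry.Subscribe(103, [](TTxId id) { std::cout << "waiter satisfied for " << id << "\n"; });
    registry.MarkComplete(103);  // tx finishes after the subscription
    return 0;
}

The same immediate-answer behaviour is what lets the test issue TestWaitNotification for transactions that completed earlier in the run and still terminate promptly.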
2026-01-10T12:11:20.834425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.834636Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 230us result status StatusPathDoesNotExist 2026-01-10T12:11:20.834802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:19.648193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:19.648291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.648333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:19.648379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:19.648420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:19.648449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:19.648511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.648586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:19.649505Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:19.649790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:19.744168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:19.744229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:19.760739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:19.760862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:19.761120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:19.770611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:19.770879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:19.771637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:19.771892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:19.775201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.775421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:19.776742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:19.776810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.776882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:19.776928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:19.777039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:19.777367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:19.934574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.935984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.936057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.936149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:19.936204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:20.605569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:20.605621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:20.605808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:11:20.605949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:20.605992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:11:20.606042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:11:20.606342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.606413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:11:20.606533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:20.606574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:20.606609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:20.606639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:20.606676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:11:20.606716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:20.606753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:11:20.606786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:11:20.606880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:11:20.606937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:11:20.606996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 7 2026-01-10T12:11:20.607074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:11:20.607942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:20.608042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:20.608099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:20.608142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:11:20.608183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:20.609095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:20.609186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:20.609220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:20.609246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:11:20.609288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:20.609381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:11:20.612239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:11:20.613489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-10T12:11:20.613807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:11:20.613927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:11:20.614037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:11:20.614061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:11:20.614158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:11:20.614183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:11:20.614642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:11:20.614737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.614773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:716:2706] 2026-01-10T12:11:20.615215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:11:20.615289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.615322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:716:2706] 2026-01-10T12:11:20.615457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:11:20.615538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.615563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:716:2706] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-10T12:11:20.616034Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.616267Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 263us result status StatusSuccess 2026-01-10T12:11:20.616741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |87.2%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |87.2%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardViewTest::ReadOnlyMode [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:19.747293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:19.747401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.747444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:19.747480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:19.747519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:19.747551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:19.747635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:19.747722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:19.748766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:19.749108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:19.860933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:19.860994Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:19.875852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:19.875939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:19.876177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:19.894229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:19.894530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear 
TempDirsState with owners number: 0 2026-01-10T12:11:19.895293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:19.895574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:19.899659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.899833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:19.900988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:19.901053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:19.901121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:19.901171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:19.901260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:19.901513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.075284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.076459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.076616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.076735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers 
} } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.076826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.076908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.077721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:11:20.848652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:11:20.848712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:11:20.848752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:11:20.848823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:11:20.856909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:11:20.857521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-10T12:11:20.857824Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:11:20.857868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:11:20.857957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:11:20.857978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:11:20.858034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:11:20.858051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:11:20.858578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:11:20.858684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:11:20.858727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.858769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:745:2735] 2026-01-10T12:11:20.858945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.858988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:745:2735] 2026-01-10T12:11:20.859041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:11:20.859133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:11:20.859183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:745:2735] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-10T12:11:20.859629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.859862Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 207us result status StatusSuccess 2026-01-10T12:11:20.860357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:20.860900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.861091Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 195us result status StatusSuccess 2026-01-10T12:11:20.861414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 
10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:20.861931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.862095Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 182us result status StatusSuccess 2026-01-10T12:11:20.862365Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> TestMalformedRequest::ContentLengthNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:10:57.876811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:10:57.876913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:57.876952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:10:57.877006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:10:57.877044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:10:57.877076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:10:57.877137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:10:57.877217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:10:57.878068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:10:57.878320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:10:57.973288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:57.973351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:57.988950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:10:57.989062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:10:57.989251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:10:57.997775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:10:57.998022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners 
number: 0 2026-01-10T12:10:57.998765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:10:57.999003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:10:58.001300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.001466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:10:58.002597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:10:58.002663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:10:58.002734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:10:58.002782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:10:58.002824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:10:58.003076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:10:58.147251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.148948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.149011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.149070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.149164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.149257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:10:58.149381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1:20.779081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-10T12:11:20.779370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.779480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.779818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.779889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.780672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-10T12:11:20.781120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.781714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.782100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:11:20.788740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:20.788904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:20.792359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:2161:4085], Recipient [1:2161:4085]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:20.792414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-10T12:11:20.793583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:20.793655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:20.794579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:2161:4085], Recipient [1:2161:4085]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:20.794630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:20.795900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:20.795974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:20.796029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:20.796063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:11:20.797768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:2199:4085], Recipient [1:2161:4085]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:20.797818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:11:20.797861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2161:4085] sender: [1:2220:2058] recipient: [1:15:2062] 2026-01-10T12:11:20.849432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:2219:4132], Recipient [1:2161:4085]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:11:20.849482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:11:20.849568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:11:20.849773Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 198us result status StatusSuccess 2026-01-10T12:11:20.850349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 19359 Memory: 156864 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: 
[1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:20.301396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:20.301528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:20.301583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:20.301627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:20.301674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:20.301709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:20.301804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:20.301886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:20.302963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:20.303280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:20.403622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:20.403697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:20.425625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:20.425729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:20.425952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:20.437257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:20.437524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:20.438424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:20.438652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:20.441261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:20.441476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:20.442761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:20.442840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:20.442917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:20.442971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:20.443093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:20.443380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:20.603508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.604581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.604740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.604842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.604925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:20.605798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... less_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:21.705306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:789:2758] sender: [1:847:2058] recipient: [1:15:2062] 2026-01-10T12:11:21.746425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:11:21.746712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2026-01-10T12:11:21.746788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2026-01-10T12:11:21.746969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:11:21.747029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:11:21.747061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: 2026-01-10T12:11:21.747106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:11:21.751428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-01-10T12:11:21.751705Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2026-01-10T12:11:21.751900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:11:21.751950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2026-01-10T12:11:21.752001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-10T12:11:21.752161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:11:21.753987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:11:21.754151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-10T12:11:21.754787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:21.754906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:21.754966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2026-01-10T12:11:21.755123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-10T12:11:21.755277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:21.755331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:11:21.757144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:21.757183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:21.757335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:11:21.757429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:21.757477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:840:2798], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-10T12:11:21.757532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:840:2798], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:11:21.757801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:11:21.757850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:11:21.757942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:11:21.757974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:11:21.758005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:11:21.758031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:11:21.758067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:11:21.758102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:11:21.758135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:11:21.758166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:11:21.758234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:11:21.758275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-10T12:11:21.758313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:11:21.758337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:11:21.759049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:11:21.759148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 
72057594046678944, cookie: 103 2026-01-10T12:11:21.759182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:11:21.759219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:11:21.759259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:11:21.759907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:11:21.759970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:11:21.760013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:11:21.760036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:11:21.760067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:11:21.760189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:11:21.763164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:11:21.764237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> TestMalformedRequest::CompressedDeflateContentLengthLower [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> TestMalformedRequest::CompressedDeflateContentLengthHigher |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthNone [GOOD] Test command err: 2026-01-10T12:11:16.297479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704373319015921:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:16.297594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:16.521851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:16.521995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:16.529378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:16.685868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:16.686595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704373319015894:2081] 1768047076295661 != 1768047076295664 2026-01-10T12:11:16.701590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:16.753503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:16.753537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:16.753544Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:16.753618Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:16.859322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:16.948410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:17.150004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:11:17.157156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:17.179429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:11:17.244173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.301225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:17.405436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.457406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:11:17.464282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.518572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.550860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.586792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.618931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.665804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.702516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.739749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:19.395455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386203919446:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:19.395455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386203919434:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:19.395534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:19.395885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386203919448:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:19.395964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:19.399768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:19.418201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704386203919449:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:11:19.487065Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704386203919501:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:19.909429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:19.942868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:19.979509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:20.008366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:20.040902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:11:21.370164Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:11:21.370292Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 78ms 2026-01-10T12:11:21.370968Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:11:21.371042Z node 1 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } 
Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:11:21.371059Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 76ms 2026-01-10T12:11:21.371560Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:11:21.371588Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:11:21.371681Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 77ms 2026-01-10T12:11:21.372304Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:11:21.540184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704394793854763:2434]: Pool not found 2026-01-10T12:11:21.540437Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:11:21.819453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704394793854790:2439]: Pool not found 2026-01-10T12:11:21.819738Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:11:21.822714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7593704394793854891:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:11:21.822714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704394793854890:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:21.822792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:21.823081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704394793854894:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:21.823130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.098834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7593704394793854888:2455]: Pool not found 2026-01-10T12:11:22.099156Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:11:22.280250Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:60498) incoming connection opened 2026-01-10T12:11:22.280323Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:60498) -> (POST /Root) 2026-01-10T12:11:22.280612Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38de:742c:2e7c:0:20de:742c:2e7c:0] request [CreateQueue] url [/Root] database [/Root] requestId: e25a1b55-81de681e-14165095-94a5fc57 2026-01-10T12:11:22.281701Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [e25a1b55-81de681e-14165095-94a5fc57] reply with status: BAD_REQUEST message: Empty body 2026-01-10T12:11:22.281923Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:60498) <- (400 InvalidArgumentException, 60 bytes) 2026-01-10T12:11:22.281968Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:60498) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json 2026-01-10T12:11:22.281999Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:60498) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: e25a1b55-81de681e-14165095-94a5fc57 Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-10T12:11:22.284621Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:60498) connection closed |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TSchemeShardViewTest::EmptyQueryText >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> TStorageTenantTest::DeclareAndDefine >> TStorageTenantTest::LsLs >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TStorageTenantTest::CreateSolomonInsideSubDomain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> TStorageTenantTest::CreateTableInsideSubDomain >> TStorageTenantTest::Boot >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] >> 
TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TStorageTenantTest::GenericCases >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TSchemeShardViewTest::EmptyQueryText [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |87.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |87.3%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] |87.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:11:26.655524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:11:26.655616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:26.655667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:11:26.655707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:11:26.655779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:11:26.655809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:11:26.655870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:11:26.655930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:11:26.656990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:11:26.657390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:11:26.748220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:26.748283Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:26.769964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:11:26.770085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:11:26.770348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:11:26.778226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:11:26.778510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:11:26.779181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:26.779381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:11:26.781140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:26.781290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:11:26.782196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:26.782244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:26.782302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:11:26.782335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:26.782414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:11:26.782605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:11:26.941474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.942969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:11:26.943028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:11:27.600902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:11:27.601149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2026-01-10T12:11:27.601231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2026-01-10T12:11:27.601392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:11:27.601482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-10T12:11:27.601525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-10T12:11:27.601574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:11:27.602691Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:11:27.609598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-10T12:11:27.609852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2026-01-10T12:11:27.610235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:11:27.610289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2026-01-10T12:11:27.610349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-10T12:11:27.610458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:11:27.610989Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-10T12:11:27.612590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet 
strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-10T12:11:27.612750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-10T12:11:27.613193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:11:27.613304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:11:27.613369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-10T12:11:27.613512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-10T12:11:27.613697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:11:27.613766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:11:27.615861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:11:27.615905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:11:27.616134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:11:27.616276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:11:27.616320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:11:27.616372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:11:27.616438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:11:27.616484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:11:27.616584Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:27.616618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:27.616660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:11:27.616710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:27.616759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:11:27.616821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:11:27.616869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:11:27.616911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:11:27.617002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:11:27.617043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:11:27.617111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:11:27.617141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:11:27.618238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:27.618367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:27.618417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:27.618461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:11:27.618519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:11:27.619322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:27.619441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:11:27.619480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:11:27.619509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:11:27.619538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:11:27.619607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:11:27.622985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:11:27.623165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |87.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> HttpRequest::ProbeServerless [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:51.841591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:51.961064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:51.970966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:51.971512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:51.971620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:52.387772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:52.387920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:52.395601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046928216905 != 1768046928216908 2026-01-10T12:08:52.414751Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:52.472877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:52.635088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:52.956632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.956719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.956762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.956839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-10T12:08:52.956867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:08:53.114917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:08:53.115158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:53.115385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:08:53.115428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:08:53.115621Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:08:53.115700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:53.115791Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:53.116554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:08:53.116725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:08:53.116770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:53.116807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:53.117000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.117043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.117112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:53.117168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:08:53.117217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:53.117252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:53.117360Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:53.117877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:53.117919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:53.118027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.118077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.118132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:53.118179Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:08:53.118223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:53.118303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:53.118758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:53.118811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:53.118951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.118983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:53.119030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:53.119061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:53.119110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:08:53.119141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:53.119188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:53.123049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:53.123595Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:53.123636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:53.123785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:08:53.124011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-10T12:08:53.124421Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:53.124469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:53.124503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:53.124639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-10T12:08:53.124671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:08:53.124738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... ck for operation: 281474976715672 2026-01-10T12:11:21.317915Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715672 2026-01-10T12:11:21.420117Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.420219Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.420330Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.420371Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.604152Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.604242Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.604387Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.604442Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.701060Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037911, for tableId 40, but no stats yet 2026-01-10T12:11:21.702274Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 49, but no stats yet 2026-01-10T12:11:21.702432Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037909, for tableId 46, but no stats yet 2026-01-10T12:11:21.702593Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037910, for tableId 43, but no stats yet 2026-01-10T12:11:21.781876Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.781974Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.782110Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.782149Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.877277Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037916, for tableId 54, but no stats yet 2026-01-10T12:11:21.877483Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037917, for tableId 51, but no stats yet 2026-01-10T12:11:21.877638Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037918, for tableId 53, but no stats yet 2026-01-10T12:11:21.877768Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037919, for tableId 56, but no stats yet 2026-01-10T12:11:21.896557Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-10T12:11:21.896618Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:21.896692Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:21.990854Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.990958Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:21.991091Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:21.991138Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.013592Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-10T12:11:22.013779Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-10T12:11:22.102032Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037890 state 5 2026-01-10T12:11:22.102382Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037891 state 5 2026-01-10T12:11:22.203480Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.203566Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.203679Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.203714Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.382138Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.382224Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.382342Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.382377Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.408626Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.419610Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037902 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.419803Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.419926Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.565331Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.565390Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.565472Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.565500Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.678788Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.689733Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037909 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.689920Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037910 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:22.689986Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 
2026-01-10T12:11:22.766668Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.766754Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.766866Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.766906Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.936838Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.936925Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:22.937026Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:401:2400], Recipient [10:401:2400]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:22.937062Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } } { items { uint32_value: 1 } items { uint32_value: 1000 } }, { items { uint32_value: 2 } items { uint32_value: 2000 } }, { items { uint32_value: 3 } items { uint32_value: 3000 } } { items { uint32_value: 3 } } { items { uint32_value: 3 } } { items { uint64_value: 3 } } { items { uint64_value: 3 } } { items { uint32_value: 300 } items { uint32_value: 3 } } { items { uint32_value: 3000 } items { uint32_value: 3 } } |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |87.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 |87.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:20.566761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:20.713338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:20.722932Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:20.723381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:20.723475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:21.183604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.298234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:21.298360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:21.334917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:21.435797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:22.275999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:22.278008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:22.278070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:22.278109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:22.278314Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:22.348809Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.042601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:27.614607Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:27.614741Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:27.634305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:27.726937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:27.727084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:27.783885Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:27.786355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:28.056977Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:28.057113Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:28.059026Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.059872Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.065601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.066542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.066712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.067016Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.067158Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.067291Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.067422Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:28.105453Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:28.653680Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:28.800815Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:28.800969Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:28.899132Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:28.905992Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:28.906299Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:28.906390Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:28.906447Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:28.906537Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:28.906592Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:28.906675Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:28.907755Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:28.946364Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:28.946497Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:29.015886Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:08:29.021517Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:08:29.115422Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:08:29.120654Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:08:29.167639Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Describe result: PathErrorUnknown 2026-01-10T12:08:29.167737Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Creating table 2026-01-10T12:08:29.167858Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:08:29.280932Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:29.285649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:29.294292Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:29.294483Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:29.310160Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:29.565022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:08:29.631474Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:30.149682Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:30.291861Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:30.291990Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Column diff is empty, finishing 2026-01-10T12:08:31.158544Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... y with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:09:03.236129Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6038:5117], Start read next stream part 2026-01-10T12:09:04.037761Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6065:5131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:04.038154Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:09:04.038454Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:09:04.038541Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6068:5134] 2026-01-10T12:09:04.038627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6068:5134] 2026-01-10T12:09:04.039171Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:6069:5135] 2026-01-10T12:09:04.039385Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6068:5134], server id = [2:6069:5135], tablet id = 72075186224037894, status = OK 2026-01-10T12:09:04.039467Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:6069:5135], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:09:04.039542Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:09:04.039772Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:09:04.039897Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6065:5131], StatRequests.size() = 1 2026-01-10T12:09:04.040275Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:10:25.808484Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwyq038sqazckazkt20fk4", SessionId: 
ydb://session/3?node_id=2&id=Y2ZmOTM1ZDEtY2E2NDgwYzMtYmM0MWE2OWQtMjBhZTUwMGY=, Slow query, duration: 82.553649s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:10:25.810283Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6038:5117], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:10:25.810546Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6038:5117], Start read next stream part 2026-01-10T12:10:25.815221Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-10T12:10:25.815586Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:10:25.816253Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6038:5117], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:10:25.816329Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6038:5117], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGE5YjExOC1kNzRhNzFkMi1hMzBhYTY5Ni00MzM0NGE0OA==, TxId: 2026-01-10T12:10:25.820543Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:10:25.820697Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], Start read next stream part 2026-01-10T12:10:25.945622Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6279:5323]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:10:25.946084Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:10:25.946144Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6279:5323], StatRequests.size() = 1 2026-01-10T12:11:27.054084Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx17mw5dtxwmzw30tm4kjx", SessionId: ydb://session/3?node_id=2&id=YTI2MDJhNzUtNWYzODU3OWEtMjJiY2U1MDgtYmQ5MjZlM2Q=, Slow query, duration: 61.228269s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT 
AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:11:27.057698Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:11:27.057869Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], Start read next stream part 2026-01-10T12:11:27.058385Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33540, txId: 18446744073709551615] shutting down 2026-01-10T12:11:27.058858Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:11:27.058917Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6028:2460], ActorId: [2:6261:5308], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzExYzZhODQtN2M2Yjg3YjQtZjUyNjRmNmEtMzM1YjIwMGU=, TxId: 2026-01-10T12:11:27.059421Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6435:5463], ActorId: [2:6438:5466], Starting query actor #1 [2:6441:5468] 2026-01-10T12:11:27.059503Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6438:5466], ActorId: [2:6441:5468], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-10T12:11:27.075566Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6438:5466], ActorId: [2:6441:5468], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2E3NTU3OTUtMmUxNDNlYmUtYTM5ZGYxZDEtZjVkNDk4Zjk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:11:27.215705Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6453:5479]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:11:27.216246Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:11:27.216624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-10T12:11:27.216705Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:11:27.216950Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:11:27.217049Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6453:5479], StatRequests.size() = 1 2026-01-10T12:11:27.217332Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:11:27.477422Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6438:5466], ActorId: [2:6441:5468], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=M2E3NTU3OTUtMmUxNDNlYmUtYTM5ZGYxZDEtZjVkNDk4Zjk=, TxId: 2026-01-10T12:11:27.477559Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6438:5466], ActorId: [2:6441:5468], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2E3NTU3OTUtMmUxNDNlYmUtYTM5ZGYxZDEtZjVkNDk4Zjk=, TxId: 2026-01-10T12:11:27.478180Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6435:5463], ActorId: [2:6438:5466], Got response [2:6441:5468] SUCCESS 2026-01-10T12:11:27.478656Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:11:27.514435Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-10T12:11:27.514544Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=[Kx\m, ActorId=[1:4394:3721] 2026-01-10T12:11:27.519063Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6471:4289]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:11:27.519428Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:11:27.519484Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:11:27.519781Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:11:27.519835Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:11:27.519888Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:11:27.540734Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |87.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.3%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TConsoleTests::TestAuthorization |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |87.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |87.3%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |87.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> ScriptExecutionsTest::RunCheckLeaseStatus >> TableCreation::SimpleTableCreation >> TStorageTenantTest::LsLs [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |87.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut >> TStorageTenantTest::GenericCases [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |87.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> TStorageTenantTest::DeclareAndDefine [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |87.4%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:51.617683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:51.741428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:51.751595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:51.752169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:51.752304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:52.230288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:52.230420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:52.237818Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046927947415 != 1768046927947418 2026-01-10T12:08:52.251599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:52.302905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:52.398289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:52.733408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.733500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.733554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.733666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-10T12:08:52.733704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:08:52.889204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:08:52.889489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.889742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:08:52.889791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:08:52.890011Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:08:52.890096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:52.890201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.891036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:08:52.891206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:08:52.891259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.891300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.891504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.891562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.891648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.891745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:08:52.891791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:52.891848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:52.891950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.892551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.892599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.892719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.892755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.892813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.892869Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:08:52.892918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:52.892995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.893343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.893375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.893491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.893525Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.893572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.893606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.893656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:08:52.893703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.893754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:52.907542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:52.908303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.908392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:52.908554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:08:52.908851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-10T12:08:52.909232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.909288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.909340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.909495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-10T12:08:52.909533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:08:52.909620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... 0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:28.697255Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:28.697358Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:28.697397Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:28.728528Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressIncrementalRestore 2026-01-10T12:11:28.728624Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5416: StateWork, processing event TEvPrivate::TEvProgressIncrementalRestore 2026-01-10T12:11:28.728666Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:444: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 281474976715670 tablet: 72057594046644480 2026-01-10T12:11:28.728744Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 281474976715670 tablet: 72057594046644480 2026-01-10T12:11:28.728788Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:55: [IncrementalRestore] Incremental restore already in state 3, skipping progress check for operation: 281474976715670 2026-01-10T12:11:28.728899Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715670 2026-01-10T12:11:28.853326Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037896, for tableId 18, but no stats yet 2026-01-10T12:11:28.853757Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037897, for tableId 20, but no stats yet 2026-01-10T12:11:28.854024Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037895, for tableId 18, but no stats yet 2026-01-10T12:11:28.958926Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:28.959013Z node 11 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:28.959089Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:28.959117Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.116586Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.116691Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.116870Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.116934Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.156027Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037898, for tableId 31, but no stats yet 2026-01-10T12:11:29.156360Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037903, for tableId 28, but no stats yet 2026-01-10T12:11:29.338485Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.338560Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.338665Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.338701Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.352701Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 35, but no stats yet 2026-01-10T12:11:29.352930Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037905, for tableId 33, but no stats yet 2026-01-10T12:11:29.353046Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037906, for tableId 33, but no stats yet 2026-01-10T12:11:29.438233Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-10T12:11:29.438309Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:29.438388Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:29.591039Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received 
event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.591143Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.591271Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.591315Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.615518Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-10T12:11:29.615732Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-10T12:11:29.788343Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.788438Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.788549Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.788590Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.917481Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:29.917749Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:29.917857Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:29.997040Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.997130Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:29.997239Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:29.997279Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:30.137127Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:30.137216Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:30.137309Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:30.137338Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:30.194188Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:30.204992Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:11:30.364282Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:30.364799Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:11:30.364975Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:11:30.365016Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 2 } items { text_value: "Bob" } items { uint32_value: 31 } }, { items { uint32_value: 12 } items { text_value: "David" } items { uint32_value: 41 } }, { items { uint32_value: 13 } items { text_value: "Frank" } items { uint32_value: 45 } } { items { uint32_value: 28 } items { uint32_value: 3 } }, { items { uint32_value: 31 } items { uint32_value: 2 } }, { items { uint32_value: 41 } items { uint32_value: 12 } }, { items { uint32_value: 45 } items { uint32_value: 13 } } { items { uint64_value: 4 } } >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2026-01-10T12:11:36.002592Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:11:36.011998Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2026-01-10T12:11:36.012062Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-10T12:11:36.012205Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-10T12:11:36.013050Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2026-01-10T12:11:36.013121Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 1 is not local. 
2026-01-10T12:11:36.013237Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-10T12:11:36.013261Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:11:36.013327Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2026-01-10T12:11:36.013351Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-10T12:11:36.013434Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2026-01-10T12:11:36.013472Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 5 is not local. 2026-01-10T12:11:36.013533Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-10T12:11:36.013613Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2026-01-10T12:11:36.013638Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 7 is not local. 2026-01-10T12:11:36.013665Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-10T12:11:36.013683Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:11:36.013721Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2026-01-10T12:11:36.013741Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 8 is not local. 
2026-01-10T12:11:36.013781Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2026-01-10T12:11:27.136387Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704420047777902:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.136737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.512204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.679226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.679305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.691615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.781346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:27.799495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.829802Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006608s 2026-01-10T12:11:28.138144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:28.144900Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704415752810416:2051] Subscribe: subscriber# [1:7593704424342745828:2448], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.144905Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704415752810422:2057] Subscribe: subscriber# [1:7593704424342745830:2448], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.144983Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704415752810419:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704424342745829:2448] 2026-01-10T12:11:28.144998Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704415752810419:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.145017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704424342745828:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 
}: sender# [1:7593704415752810416:2051] 2026-01-10T12:11:28.145027Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704415752810419:2054] Subscribe: subscriber# [1:7593704424342745829:2448], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.145079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704424342745830:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704415752810422:2057] 2026-01-10T12:11:28.145105Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704424342745829:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704415752810419:2054] 2026-01-10T12:11:28.145434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704424342745824:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704424342745825:2448] 2026-01-10T12:11:28.145507Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704415752810416:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704424342745828:2448] 2026-01-10T12:11:28.145540Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704415752810422:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704424342745830:2448] 2026-01-10T12:11:28.145555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704415752810419:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704424342745829:2448] 2026-01-10T12:11:28.145636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704424342745824:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704424342745827:2448] 2026-01-10T12:11:28.145706Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593704424342745824:2448][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7593704420047778034:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:28.145756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704424342745824:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704424342745826:2448] 2026-01-10T12:11:28.145803Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7593704424342745824:2448][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7593704420047778034:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:28.145869Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704420047778034:2122], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-10T12:11:28.145955Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# 
[1:7593704420047778034:2122], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593704424342745824:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.182617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704420047778034:2122], cacheItem# { Subscriber: { Subscriber: [1:7593704424342745824:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:28.182774Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704424342745831:2449], recipient# [1:7593704424342745823:2287], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.183974Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704420047778027:2119] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.184037Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704424342745833:2451] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.184160Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704420047778034:2122], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.184271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704420047778284:2284][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704420047778034:2122], cookie# 1 2026-01-10T12:11:28.184380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420047778288:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778285:2284], cookie# 1 2026-01-10T12:11:28.184412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420047778289:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778286:2284], cookie# 1 2026-01-10T12:11:28.184434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420047778290:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778287:2284], cookie# 1 2026-01-10T12:11:28.184470Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704415752810416:2051] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778288:2284], cookie# 1 2026-01-10T12:11:28.184514Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704415752810419:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778289:2284], cookie# 1 2026-01-10T12:11:28.184537Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704415752810422:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420047778290:2284], cookie# 1 2026-01-10T12:11:28.184612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420047778288:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704415752810416:2051], cookie# 1 2026-01-10T12:11:28.184630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420047778289:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704415752810419:2054], cookie# 1 2026-01-10T12:11:28.184647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420047778290:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704415752810422:2057], cookie# 1 2026-01-10T12:11:28.184695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420047778284:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420047778285:2284], cookie# 1 2026-01-10T12:11:28.184720Z node 1 :SCHEME_BOARD_S ... rtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-10T12:11:31.230068Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704437227648291:2876], recipient# [1:7593704437227648290:2875], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:31.230104Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704437227648290:2875] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:31.230171Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704437227648290:2875] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2026-01-10T12:11:31.231312Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704437227648290:2875] Handle TEvDescribeSchemeResult Forward to# [1:7593704437227648289:2874] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1768047090950 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046644480 2026-01-10T12:11:31.237632Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:31.292574Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704433317793791:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.292761Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704433317793791:2107], cacheItem# { Subscriber: { Subscriber: [3:7593704433317793887:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:31.292890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704437612761584:2370], recipient# [3:7593704437612761583:2312], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.339256Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704415752810416:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704433317793879:2112] 2026-01-10T12:11:31.339307Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: 
[1:7593704415752810416:2051] Unsubscribe: subscriber# [3:7593704433317793879:2112], path# /dc-1/USER_0 2026-01-10T12:11:31.339338Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704415752810419:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704433317793880:2112] 2026-01-10T12:11:31.339349Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7593704415752810419:2054] Unsubscribe: subscriber# [3:7593704433317793880:2112], path# /dc-1/USER_0 2026-01-10T12:11:31.339392Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704415752810422:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704433317793881:2112] 2026-01-10T12:11:31.339404Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7593704415752810422:2057] Unsubscribe: subscriber# [3:7593704433317793881:2112], path# /dc-1/USER_0 2026-01-10T12:11:31.339588Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:11:31.340808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:11:32.298827Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704433317793791:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.298985Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704433317793791:2107], cacheItem# { Subscriber: { Subscriber: [3:7593704433317793887:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.299087Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704441907728897:2373], recipient# [3:7593704441907728896:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2026-01-10T12:11:27.235397Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704418506869859:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.235465Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.242094Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005714s 2026-01-10T12:11:27.694539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.734890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.735062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.769345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.900214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:27.926556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.212284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704418506870078:2144] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.212352Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704422801837851:2461] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.212480Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704418506870085:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.212571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704418506870329:2301][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704418506870085:2147], cookie# 1 2026-01-10T12:11:28.214306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704418506870349:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870346:2301], cookie# 1 2026-01-10T12:11:28.214362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704418506870350:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870347:2301], cookie# 1 2026-01-10T12:11:28.214379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704418506870351:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870348:2301], cookie# 1 2026-01-10T12:11:28.214412Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704414211902427:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870349:2301], cookie# 1 2026-01-10T12:11:28.214438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704414211902430:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870350:2301], 
cookie# 1 2026-01-10T12:11:28.214455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704414211902433:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704418506870351:2301], cookie# 1 2026-01-10T12:11:28.214516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704418506870349:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704414211902427:2051], cookie# 1 2026-01-10T12:11:28.214534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704418506870350:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704414211902430:2054], cookie# 1 2026-01-10T12:11:28.214568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704418506870351:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704414211902433:2057], cookie# 1 2026-01-10T12:11:28.214622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704418506870329:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704418506870346:2301], cookie# 1 2026-01-10T12:11:28.214656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704418506870329:2301][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.214678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704418506870329:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704418506870347:2301], cookie# 1 2026-01-10T12:11:28.214708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704418506870329:2301][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.214740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704418506870329:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704418506870348:2301], cookie# 1 2026-01-10T12:11:28.214753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704418506870329:2301][/dc-1] Sync cookie mismatch: sender# [1:7593704418506870348:2301], cookie# 1, current cookie# 0 2026-01-10T12:11:28.214797Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704418506870085:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.225544Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704418506870085:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704418506870329:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.225686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704418506870085:2147], cacheItem# { 
Subscriber: { Subscriber: [1:7593704418506870329:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.229628Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704422801837852:2462], recipient# [1:7593704422801837851:2461], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.229698Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704422801837851:2461] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.240691Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704418506870085:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.240772Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593704418506870085:2147], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-10T12:11:28.241006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593704422801837854:2463][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:11:28.241537Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704414211902427:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704422801837858:2463] 2026-01-10T12:11:28.241554Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704414211902427:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.241636Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704414211902427:2051] Subscribe: subscriber# [1:7593704422801837858:2463], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.241700Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704414211902430:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: 
sender# [1:7593704422801837859:2463] 2026-01-10T12:11:28.241708Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704414211902430:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.241728Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704414211902430:2054] Subscribe: subscriber# [1:7593704422801837859:2463], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.241751Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704414211902433:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704422801837860:2463] 2026-01-10T12:11:28.241757Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704414211902433:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.241787Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:75937044142119 ... HARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2026-01-10T12:11:29.845187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:11:29.845289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2026-01-10T12:11:29.845410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-10T12:11:29.845566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2026-01-10T12:11:29.848446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:11:29.852187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2026-01-10T12:11:29.852448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-01-10T12:11:29.852601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2026-01-10T12:11:29.852708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:11:29.852814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2026-01-10T12:11:29.852978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:11:29.853009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2026-01-10T12:11:29.853055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-01-10T12:11:29.853170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:11:29.853195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-10T12:11:29.853292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2026-01-10T12:11:29.883603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:11:29.883645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:11:29.883691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-10T12:11:29.883699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-10T12:11:29.883729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:11:29.883737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:11:29.883755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:11:29.883760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-10T12:11:29.883777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-10T12:11:29.883782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-10T12:11:29.883797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:11:29.883802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:11:29.883827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2026-01-10T12:11:29.883835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-01-10T12:11:29.883866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted 
shardIdx 72057594046644480:5 2026-01-10T12:11:29.883889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:11:29.883954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2026-01-10T12:11:29.884006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:11:29.884029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:11:29.884048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-10T12:11:29.884134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:11:29.906028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:11:30.180249Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:30.184352Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704426423535505:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:30.184533Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704430718503134:2309], recipient# [3:7593704430718503133:2306], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.196248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704426423535505:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.196431Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704435013470432:2310], recipient# [3:7593704435013470431:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.200651Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704426423535505:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.200804Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704439308437730:2311], recipient# [3:7593704439308437729:2308], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.200943Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704426423535505:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.201190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704443603405028:2312], recipient# [3:7593704443603405027:2309], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2026-01-10T12:11:26.984209Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.008670s 2026-01-10T12:11:27.060060Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704416746126291:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.060178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.158093Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704417081722452:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.158147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.636885Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.660476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.725069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.725219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.727017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.727097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.740742Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:11:27.740976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.742164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.828751Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:27.914209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.918414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.086340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:28.180336Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:28.462704Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704416746126529:2145] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.462745Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704421041094317:2467] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.462843Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416746126536:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.462932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704416746126759:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# 
[1:7593704416746126536:2148], cookie# 1 2026-01-10T12:11:28.464542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704416746126779:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126776:2292], cookie# 1 2026-01-10T12:11:28.464578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704416746126780:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126777:2292], cookie# 1 2026-01-10T12:11:28.464593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704416746126781:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126778:2292], cookie# 1 2026-01-10T12:11:28.464625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704412451158877:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126779:2292], cookie# 1 2026-01-10T12:11:28.464653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704412451158880:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126780:2292], cookie# 1 2026-01-10T12:11:28.464669Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704412451158883:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704416746126781:2292], cookie# 1 2026-01-10T12:11:28.464730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704416746126779:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704412451158877:2052], cookie# 1 2026-01-10T12:11:28.464748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704416746126780:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704412451158880:2055], cookie# 1 2026-01-10T12:11:28.464763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704416746126781:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704412451158883:2058], cookie# 1 2026-01-10T12:11:28.464801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704416746126759:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416746126776:2292], cookie# 1 2026-01-10T12:11:28.464841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704416746126759:2292][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.464857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704416746126759:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416746126777:2292], cookie# 1 2026-01-10T12:11:28.464884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704416746126759:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.464909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704416746126759:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416746126778:2292], cookie# 1 
2026-01-10T12:11:28.464923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704416746126759:2292][/dc-1] Sync cookie mismatch: sender# [1:7593704416746126778:2292], cookie# 1, current cookie# 0 2026-01-10T12:11:28.464970Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704416746126536:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.473339Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704416746126536:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704416746126759:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.473464Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416746126536:2148], cacheItem# { Subscriber: { Subscriber: [1:7593704416746126759:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.478076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704421041094318:2468], recipient# [1:7593704421041094317:2467], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.478182Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704421041094317:2467] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.546713Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704421041094317:2467] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:28.550004Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704421041094317:2467] Handle TEvDescribeSchemeResult Forward to# [1:7593704421041094316:2466] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 7205 ... e ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.195169Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704417081722487:2107], cacheItem# { Subscriber: { Subscriber: [2:7593704421376690015:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.195273Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704447146493920:2137], recipient# [2:7593704447146493919:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.384593Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704417081722487:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.384750Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704417081722487:2107], cacheItem# { Subscriber: { Subscriber: [2:7593704442851526536:2126] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.384822Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704417081722487:2107], cacheItem# { Subscriber: { Subscriber: [2:7593704442851526537:2127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 
Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.384941Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704447146493921:2138], recipient# [2:7593704442851526532:2300], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.385648Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7593704442851526532:2300], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:34.465669Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7593704442851526545:2128] 2026-01-10T12:11:34.465748Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.465773Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7593704442851526546:2128] 2026-01-10T12:11:34.465829Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.465862Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7593704442851526547:2128] 2026-01-10T12:11:34.465887Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526538:2128][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.465992Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7593704442851526539:2127] 2026-01-10T12:11:34.466033Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.466062Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# 
[2:7593704442851526540:2127] 2026-01-10T12:11:34.466088Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.466111Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7593704442851526541:2127] 2026-01-10T12:11:34.466137Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526537:2127][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.481428Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7593704442851526552:2126] 2026-01-10T12:11:34.481505Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.481529Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7593704442851526553:2126] 2026-01-10T12:11:34.481552Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.481592Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7593704442851526554:2126] 2026-01-10T12:11:34.481637Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7593704442851526536:2126][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7593704417081722487:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |87.4%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2026-01-10T12:11:27.824297Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704419506715931:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.824358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.904607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:28.307036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.340528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:28.340663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:28.351272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:28.480144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:28.502903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.845045Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:28.913065Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704419506716042:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.913136Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704423801683839:2442] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.913235Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419506716049:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.913325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704423801683654:2315][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704419506716049:2121], cookie# 1 2026-01-10T12:11:28.914825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423801683676:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683673:2315], cookie# 1 2026-01-10T12:11:28.914881Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423801683677:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683674:2315], cookie# 1 2026-01-10T12:11:28.914896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423801683678:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683675:2315], cookie# 1 2026-01-10T12:11:28.914993Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419506715727:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683676:2315], cookie# 1 2026-01-10T12:11:28.915029Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419506715730:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683677:2315], cookie# 1 2026-01-10T12:11:28.915047Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419506715733:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423801683678:2315], cookie# 1 2026-01-10T12:11:28.915113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423801683676:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419506715727:2050], cookie# 1 2026-01-10T12:11:28.915142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423801683677:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419506715730:2053], cookie# 1 2026-01-10T12:11:28.915157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423801683678:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419506715733:2056], cookie# 1 2026-01-10T12:11:28.915213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423801683654:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423801683673:2315], cookie# 1 2026-01-10T12:11:28.915252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704423801683654:2315][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.915281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423801683654:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423801683674:2315], cookie# 1 2026-01-10T12:11:28.915302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704423801683654:2315][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.915329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423801683654:2315][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423801683675:2315], cookie# 1 2026-01-10T12:11:28.915345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704423801683654:2315][/dc-1] Sync cookie mismatch: sender# [1:7593704423801683675:2315], cookie# 1, current cookie# 0 2026-01-10T12:11:28.915416Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# 
[1:7593704419506716049:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.927280Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419506716049:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704423801683654:2315] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.927441Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704423801683654:2315] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.930389Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704423801683840:2443], recipient# [1:7593704423801683839:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.930483Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704423801683839:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.982270Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704423801683839:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:28.986004Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704423801683839:2442] Handle TEvDescribeSchemeResult Forward to# [1:7593704423801683838:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 
ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:11:29.000235Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593704419506716042:2118] Handle TEvProposeTransaction 2026-01-10T12:11:29.000263Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593704419506716042:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:11:29.000329Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593704419506716042:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593704428096651147:2450] 2026-01-10T12:11:29.199386Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593704428096651147:2450] txid# 281474976710657 Bootstrap EvSch ... nder# [1:7593704440981553982:3148] 2026-01-10T12:11:32.948670Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704419506715733:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704440981553988:3149] 2026-01-10T12:11:32.948731Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-10T12:11:32.948824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593704440981553968:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:32.948975Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704440981553968:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.949011Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:2651: HandleNotify: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:11:32.949075Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593704440981553969:3148] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:32.949131Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704440981553969:3148] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.949178Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-10T12:11:32.949225Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419506716049:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593704440981553970:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:32.949279Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704440981553970:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.949356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704440981553989:3150], recipient# [1:7593704440981553964:2330], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true 
SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.949402Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704440981553990:3151], recipient# [1:7593704440981553966:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.828561Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419506716049:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.828693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704423801683829:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.828781Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704445276521308:3157], recipient# [1:7593704445276521307:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.848468Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419506716049:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.848590Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704423801683829:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.848663Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704445276521310:3158], recipient# [1:7593704445276521309:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.957452Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419506716049:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.957617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419506716049:2121], cacheItem# { Subscriber: { Subscriber: [1:7593704440981553970:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.957834Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704445276521315:3162], recipient# [1:7593704445276521314:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2026-01-10T12:11:26.826473Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704416085403554:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:26.826534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.161058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:11:27.250252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.250411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.259417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.348220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:27.406709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.656307Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704416085403783:2117] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:27.656361Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704420380371579:2450] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:27.656474Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:27.656578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704420380371336:2284][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704416085403791:2120], cookie# 1 2026-01-10T12:11:27.658348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420380371347:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371343:2284], cookie# 1 2026-01-10T12:11:27.658394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420380371348:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371344:2284], cookie# 1 2026-01-10T12:11:27.658432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420380371349:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371345:2284], cookie# 1 2026-01-10T12:11:27.658480Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704416085403471:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371347:2284], cookie# 1 2026-01-10T12:11:27.658512Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704416085403474:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371348:2284], cookie# 1 2026-01-10T12:11:27.658544Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704416085403477:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420380371349:2284], cookie# 1 2026-01-10T12:11:27.658598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420380371347:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416085403471:2050], cookie# 1 2026-01-10T12:11:27.658614Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420380371348:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416085403474:2053], cookie# 1 2026-01-10T12:11:27.658628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420380371349:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704416085403477:2056], cookie# 1 2026-01-10T12:11:27.658685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420380371336:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420380371343:2284], cookie# 1 2026-01-10T12:11:27.658710Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704420380371336:2284][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:27.658726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420380371336:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420380371344:2284], cookie# 1 2026-01-10T12:11:27.658764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704420380371336:2284][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:27.658793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420380371336:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420380371345:2284], cookie# 1 2026-01-10T12:11:27.658807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704420380371336:2284][/dc-1] Sync cookie mismatch: sender# [1:7593704420380371345:2284], cookie# 1, current cookie# 0 2026-01-10T12:11:27.658856Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704416085403791:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:27.669203Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704416085403791:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704420380371336:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:27.669345Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704420380371336:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown 
Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:27.672252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704420380371580:2451], recipient# [1:7593704420380371579:2450], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:27.672333Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704420380371579:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:27.706145Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704420380371579:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:27.709765Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704420380371579:2450] Handle TEvDescribeSchemeResult Forward to# [1:7593704420380371578:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:11:27.717317Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593704416085403783:2117] Handle TEvProposeTransaction 2026-01-10T12:11:27.717360Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593704416085403783:2117] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:11:27.717448Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# 
[1:7593704416085403783:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593704420380371585:2455] 2026-01-10T12:11:27.845404Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:27.845498Z nod ... 480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.886961Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704441855208926:3076], recipient# [1:7593704441855208925:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.780634Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.780769Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704441855208900:3070] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.780846Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704446150176242:3080], recipient# [1:7593704446150176241:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.837074Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.837209Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704420380371588:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.837285Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704446150176244:3081], recipient# [1:7593704446150176243:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.888973Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.889128Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704420380371588:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.889210Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704446150176246:3082], recipient# [1:7593704446150176245:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.784402Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.784540Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704441855208900:3070] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.784626Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704450445143562:3086], recipient# [1:7593704450445143561:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.839658Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.839784Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704420380371588:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.839849Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704450445143564:3087], recipient# [1:7593704450445143563:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2026-01-10T12:11:34.892443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704416085403791:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.892581Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704416085403791:2120], cacheItem# { Subscriber: { Subscriber: [1:7593704420380371588:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.892660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704450445143566:3088], recipient# [1:7593704450445143565:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-10T12:11:27.257292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704419783675957:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.257351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.755077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.819020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.819119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.829209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.884359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:28.034570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.212304Z node 
1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704419783676204:2144] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.212386Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704424078643959:2448] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.212529Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419783676210:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.212650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704419783676429:2288][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704419783676210:2146], cookie# 1 2026-01-10T12:11:28.214598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704419783676453:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676450:2288], cookie# 1 2026-01-10T12:11:28.214669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704419783676454:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676451:2288], cookie# 1 2026-01-10T12:11:28.214688Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704419783676455:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676452:2288], cookie# 1 2026-01-10T12:11:28.214732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419783675848:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676453:2288], cookie# 1 2026-01-10T12:11:28.214765Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419783675851:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676454:2288], cookie# 1 2026-01-10T12:11:28.214784Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419783675854:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704419783676455:2288], cookie# 1 2026-01-10T12:11:28.214859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704419783676453:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783675848:2051], cookie# 1 2026-01-10T12:11:28.214884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704419783676454:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783675851:2054], cookie# 1 2026-01-10T12:11:28.214900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704419783676455:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783675854:2057], cookie# 1 2026-01-10T12:11:28.214945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704419783676429:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783676450:2288], cookie# 1 2026-01-10T12:11:28.214977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: 
[main][1:7593704419783676429:2288][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.215024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704419783676429:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783676451:2288], cookie# 1 2026-01-10T12:11:28.215058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704419783676429:2288][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.215092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704419783676429:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419783676452:2288], cookie# 1 2026-01-10T12:11:28.215108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704419783676429:2288][/dc-1] Sync cookie mismatch: sender# [1:7593704419783676452:2288], cookie# 1, current cookie# 0 2026-01-10T12:11:28.215458Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704419783676210:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.223569Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419783676210:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704419783676429:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.223704Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419783676210:2146], cacheItem# { Subscriber: { Subscriber: [1:7593704419783676429:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.227557Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704424078643960:2449], recipient# [1:7593704424078643959:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.227651Z node 1 
:TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704424078643959:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.260892Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419783676210:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.261009Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593704419783676210:2146], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-10T12:11:28.261358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593704424078643962:2450][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:11:28.267508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704419783675848:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704424078643966:2450] 2026-01-10T12:11:28.267537Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704419783675848:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.267604Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704419783675848:2051] Subscribe: subscriber# [1:7593704424078643966:2450], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.267650Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704419783675851:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704424078643967:2450] 2026-01-10T12:11:28.267660Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704419783675851:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.267682Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704419783675851:2054] Subscribe: subscriber# [1:7593704424078643967:2450], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:28.267721Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704419783675854:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704424078643968:2450] 2026-01-10T12:11:28.267731Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704419783675854:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:28.267767Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704419783675854:2057] Subscribe: subscriber# [1:7593704424078643968:2450], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72 ... 
: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:30.121985Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704419783675848:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704432614636747:2349] 2026-01-10T12:11:30.122017Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704419783675851:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704432614636748:2349] 2026-01-10T12:11:30.122033Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704419783675854:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704432614636749:2349] 2026-01-10T12:11:30.225109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704419783675848:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704428319669056:2110] 2026-01-10T12:11:30.225149Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7593704419783675848:2051] Unsubscribe: subscriber# [3:7593704428319669056:2110], path# /dc-1/USER_0 2026-01-10T12:11:30.225204Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704419783675851:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704428319669057:2110] 2026-01-10T12:11:30.225214Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7593704419783675851:2054] Unsubscribe: subscriber# [3:7593704428319669057:2110], path# /dc-1/USER_0 2026-01-10T12:11:30.225248Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7593704419783675854:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7593704428319669058:2110] 2026-01-10T12:11:30.225258Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7593704419783675854:2057] Unsubscribe: subscriber# [3:7593704428319669058:2110], path# /dc-1/USER_0 2026-01-10T12:11:30.225888Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:11:30.245884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:11:30.311687Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419783676210:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:30.311819Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419783676210:2146], cacheItem# { Subscriber: { Subscriber: [1:7593704424078643962:2450] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: 
false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:30.311912Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704432668579094:2863], recipient# [1:7593704432668579093:2299], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.120257Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704428319669042:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:31.120384Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704428319669042:2107], cacheItem# { Subscriber: { Subscriber: [3:7593704432614636743:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:31.120495Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704436909604083:2360], recipient# [3:7593704436909604082:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.118001Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704428319669042:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:32.118178Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704428319669042:2107], cacheItem# { Subscriber: { Subscriber: [3:7593704432614636743:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:32.118264Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704441204571381:2361], recipient# [3:7593704441204571380:2314], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.120690Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704428319669042:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:33.120820Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704428319669042:2107], cacheItem# { Subscriber: { Subscriber: [3:7593704432614636743:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:33.120936Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704445499538679:2362], recipient# [3:7593704445499538678:2315], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.107650Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704428319668907:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:34.107716Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:34.124707Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704428319669042:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.124825Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704428319669042:2107], 
cacheItem# { Subscriber: { Subscriber: [3:7593704432614636743:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:34.124899Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704449794505977:2363], recipient# [3:7593704449794505976:2316], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2026-01-10T12:11:27.809530Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704419287678873:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.809969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:28.152188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.287603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:28.287717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:28.301155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:28.409562Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:28.419500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.732453Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704419287679008:2120] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.732515Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704423582646809:2453] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.732651Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419287679014:2122], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.732739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704423582646536:2269][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704419287679014:2122], cookie# 1 2026-01-10T12:11:28.734527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423582646540:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646537:2269], cookie# 1 2026-01-10T12:11:28.734562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423582646541:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646538:2269], cookie# 1 2026-01-10T12:11:28.734577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423582646542:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646539:2269], cookie# 1 2026-01-10T12:11:28.734651Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419287678690:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646540:2269], cookie# 1 2026-01-10T12:11:28.734702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419287678693:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646541:2269], cookie# 1 2026-01-10T12:11:28.734723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704419287678696:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423582646542:2269], cookie# 1 2026-01-10T12:11:28.734770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423582646540:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419287678690:2051], cookie# 1 2026-01-10T12:11:28.734786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423582646541:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419287678693:2054], cookie# 1 2026-01-10T12:11:28.734807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423582646542:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704419287678696:2057], cookie# 1 2026-01-10T12:11:28.734851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423582646536:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423582646537:2269], cookie# 1 2026-01-10T12:11:28.734879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704423582646536:2269][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.734905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423582646536:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423582646538:2269], cookie# 1 2026-01-10T12:11:28.734931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7593704423582646536:2269][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.734960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423582646536:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423582646539:2269], cookie# 1 2026-01-10T12:11:28.734997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704423582646536:2269][/dc-1] Sync cookie mismatch: sender# [1:7593704423582646539:2269], cookie# 1, current cookie# 0 2026-01-10T12:11:28.735073Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704419287679014:2122], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.742052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704419287679014:2122], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704423582646536:2269] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.742231Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704419287679014:2122], cacheItem# { Subscriber: { Subscriber: [1:7593704423582646536:2269] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.749307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704423582646810:2454], recipient# [1:7593704423582646809:2453], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.749414Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704423582646809:2453] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.786896Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704423582646809:2453] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:28.791460Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704423582646809:2453] Handle 
TEvDescribeSchemeResult Forward to# [1:7593704423582646808:2452] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:11:28.792918Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593704419287679008:2120] Handle TEvProposeTransaction 2026-01-10T12:11:28.792938Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593704419287679008:2120] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:11:28.792988Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593704419287679008:2120] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593704423582646816:2459] 2026-01-10T12:11:28.810821Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704419287679014:2122], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.810936Z nod ... 
3704446837942950:2518], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.741036Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704446837942975:2255], recipient# [3:7593704446837942957:2522], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.741075Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704446837942976:2256], recipient# [3:7593704446837942946:2514], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.741126Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704446837942977:2257], recipient# [3:7593704425363106426:2495], result# { ErrorCount: 3 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.743171Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7593704446837942950:2518], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:34.743329Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/USER_0/.metadata/script_executions 2026-01-10T12:11:34.743376Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:34.743398Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:34.744176Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:34.751137Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942964:2248] 2026-01-10T12:11:34.751216Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.751241Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942965:2248] 2026-01-10T12:11:34.751262Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.751279Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942966:2248] 2026-01-10T12:11:34.751299Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.773176Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942964:2248] 2026-01-10T12:11:34.773255Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2026-01-10T12:11:34.773275Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942965:2248] 2026-01-10T12:11:34.773299Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.773319Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942966:2248] 2026-01-10T12:11:34.773342Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.813597Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942964:2248] 2026-01-10T12:11:34.813670Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.813692Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942965:2248] 2026-01-10T12:11:34.813711Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.813727Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704446837942959:2248][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7593704446837942966:2248] 2026-01-10T12:11:34.813746Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704446837942959:2248][/dc-1/USER_0] Ignore empty state: owner# [3:7593704425363106440:2237], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:34.839143Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704425363106440:2237], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: 
ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.839310Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704446837942991:2258], recipient# [3:7593704446837942950:2518], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:34.839759Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7593704446837942950:2518], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2026-01-10T12:10:28.511473Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704165540569042:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:28.511820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:28.961835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:28.961932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:29.236244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704165540568889:2081] 1768047028501047 != 1768047028501050 2026-01-10T12:10:29.328848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:29.396826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:29.544225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:30.107654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:30.107683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:30.107689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:30.107768Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:30.243485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:30.253412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:10:33.448521Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:10:33.465648Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:10:33.465728Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-10T12:10:33.465758Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:10:33.471192Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Describe result: PathErrorUnknown 2026-01-10T12:10:33.471213Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Creating table 2026-01-10T12:10:33.471264Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:10:33.471362Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Describe result: PathErrorUnknown 2026-01-10T12:10:33.471385Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Creating table 2026-01-10T12:10:33.471402Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:10:33.475867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:33.476839Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Describe result: PathErrorUnknown 2026-01-10T12:10:33.476860Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Creating table 2026-01-10T12:10:33.476896Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:10:33.486182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:33.489051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:33.495614Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:10:33.495673Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Subscribe on create table tx: 281474976710658 2026-01-10T12:10:33.495806Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:10:33.495834Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Subscribe on create table tx: 281474976710659 2026-01-10T12:10:33.498242Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Subscribe on tx: 281474976710659 registered 2026-01-10T12:10:33.498269Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:10:33.498328Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:10:33.498349Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Subscribe on create table tx: 281474976710660 2026-01-10T12:10:33.502543Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Subscribe on tx: 281474976710660 registered 2026-01-10T12:10:33.516150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704165540569042:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:33.516204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:10:33.643523Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:10:33.678836Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:10:33.678910Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593704187015406209:2495] Owner: [1:7593704187015406206:2492]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:10:33.728718Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. 
Table already exists, number of columns: 33, has SecurityObject: true 2026-01-10T12:10:33.728793Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Column diff is empty, finishing 2026-01-10T12:10:33.729770Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:10:33.730711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:10:33.732183Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:10:33.732196Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7593704187015406207:2493] Owner: [1:7593704187015406206:2492]. Successful alter request: ExecComplete 2026-01-10T12:10:33.732383Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:10:33.732418Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Column diff is empty, finishing 2026-01-10T12:10:33.732467Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704187015406208:2494] Owner: [1:7593704187015406206:2492]. Created ESch ... oxy_service.cpp:926: Forwarded response to sender actor, requestId: 64, sender: [4:7593704446959416048:2649], selfId: [4:7593704386829871948:2265], source: [4:7593704446959416025:2642] 2026-01-10T12:11:34.758309Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7593704446959416021:2639], ActorId: [4:7593704446959416022:2640], TraceId: ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=4&id=NmVhMDFjYjUtNjJlYWIwYzktYmM0NDE4NmUtMjZkNDc5MDI=, TxId: 2026-01-10T12:11:34.758454Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7593704446959416021:2639], ActorId: [4:7593704446959416022:2640], TraceId: ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NmVhMDFjYjUtNjJlYWIwYzktYmM0NDE4NmUtMjZkNDc5MDI=, TxId: 2026-01-10T12:11:34.758511Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7593704446959416021:2639], ActorId: [4:7593704446959416022:2640], TraceId: ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-10T12:11:34.758748Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [4:7593704446959416020:2638], ActorId: [4:7593704446959416021:2639], TraceId: ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [4:7593704446959416022:2640] SUCCESS 2026-01-10T12:11:34.759430Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NmVhMDFjYjUtNjJlYWIwYzktYmM0NDE4NmUtMjZkNDc5MDI=, workerId: [4:7593704446959416025:2642], local sessions count: 1 2026-01-10T12:11:34.759692Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7593704442664448600:3110] ActorId: [4:7593704442664448648:3131] Database: /dc-1 ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-10T12:11:34.759732Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7593704442664448600:3110] ActorId: [4:7593704442664448648:3131] Database: /dc-1 ExecutionId: 6d506a02-f21900c-2f6350d6-9d493411. Reply success 2026-01-10T12:11:34.759803Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4833: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704442664448600:3110]. Lease check #0 [4:7593704442664448650:3133] successfully completed, OperationsToCheck: 0 2026-01-10T12:11:34.759825Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704442664448600:3110]. Finish, success: 1, issues: 2026-01-10T12:11:34.759854Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2026-01-10T12:11:34.862380Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx3b2d78nvk7cyq42f5wb6, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=Y2VmMmUwZTEtMzI2YTZmYy01NzEyN2Q2YS1hNzE5MTdi, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 65, targetId: [4:7593704429779546533:2560] 2026-01-10T12:11:34.862451Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 65 timeout: 300.000000s actor id: [4:7593704446959416077:3181] 2026-01-10T12:11:35.082415Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx3b2d78nvk7cyq42f5wb6", Forwarded response to sender actor, requestId: 65, sender: [4:7593704446959416076:2654], selfId: [4:7593704386829871948:2265], source: [4:7593704429779546533:2560] 2026-01-10T12:11:35.109176Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx3ba47ppa8rdg5pa6t1te, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=Y2VmMmUwZTEtMzI2YTZmYy01NzEyN2Q2YS1hNzE5MTdi, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 66, targetId: [4:7593704429779546533:2560] 2026-01-10T12:11:35.109248Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 66 timeout: 300.000000s actor id: [4:7593704451254383393:3190] 2026-01-10T12:11:35.292265Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:94: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2026-01-10T12:11:35.292333Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:109: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.862015s 2026-01-10T12:11:35.614680Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:35.614711Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:36.156249Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:53: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Start lease checker: [4:7593704455549350710:3202] 2026-01-10T12:11:36.156337Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4782: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704455549350710:3202]. Bootstrap. Started TListExpiredLeasesQueryActor: [4:7593704455549350711:3203] 2026-01-10T12:11:36.156350Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7593704455549350710:3202], ActorId: [4:7593704455549350711:3203], Bootstrap. Database: /dc-1, IsSystemUser: 1, run create session 2026-01-10T12:11:36.157469Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976026613.394178s seconds to be completed 2026-01-10T12:11:36.161231Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, workerId: [4:7593704455549350713:2670], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:11:36.161601Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:11:36.162217Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7593704455549350710:3202], ActorId: [4:7593704455549350711:3203], TraceId: [4:7593704455549350710:3202], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, TxId: , text: -- TListExpiredLeasesQueryActor::OnRunQuery DECLARE $max_lease_deadline AS Timestamp; DECLARE $max_listed_leases AS Uint64; SELECT database, execution_id FROM `.metadata/script_execution_leases` WHERE lease_deadline < $max_lease_deadline AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL) LIMIT $max_listed_leases; 2026-01-10T12:11:36.162632Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 68, targetId: [4:7593704455549350713:2670] 2026-01-10T12:11:36.162686Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 68 timeout: 300.000000s actor id: [4:7593704455549350715:3204] 2026-01-10T12:11:36.173893Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 68, sender: [4:7593704455549350714:2671], selfId: [4:7593704386829871948:2265], source: [4:7593704455549350713:2670] 2026-01-10T12:11:36.174689Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7593704455549350710:3202], ActorId: [4:7593704455549350711:3203], TraceId: [4:7593704455549350710:3202], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, TxId: 2026-01-10T12:11:36.174779Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4759: [ScriptExecutions] [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7593704455549350710:3202], ActorId: [4:7593704455549350711:3203], TraceId: [4:7593704455549350710:3202], Found 0 expired leases (fetched rows 0) 2026-01-10T12:11:36.174814Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7593704455549350710:3202], ActorId: [4:7593704455549350711:3203], TraceId: [4:7593704455549350710:3202], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, TxId: 2026-01-10T12:11:36.174932Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4794: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704455549350710:3202]. Got list expired leases response [4:7593704455549350711:3203], found 0 expired leases 2026-01-10T12:11:36.174965Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4812: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704455549350710:3202]. List expired leases successfully completed 2026-01-10T12:11:36.174992Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7593704438369481257:3088] ActorId: [4:7593704455549350710:3202]. 
Finish, success: 1, issues: 2026-01-10T12:11:36.175033Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2026-01-10T12:11:36.177325Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=YjlmZGY5MjktZWQyZDhkYjktNjQ4NTMxMjMtNGYzNGE2Y2I=, workerId: [4:7593704455549350713:2670], local sessions count: 1 2026-01-10T12:11:36.191068Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx3ba47ppa8rdg5pa6t1te", Forwarded response to sender actor, requestId: 66, sender: [4:7593704451254383392:2659], selfId: [4:7593704386829871948:2265], source: [4:7593704429779546533:2560] 2026-01-10T12:11:36.211002Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=Y2VmMmUwZTEtMzI2YTZmYy01NzEyN2Q2YS1hNzE5MTdi, workerId: [4:7593704429779546533:2560], local sessions count: 0 2026-01-10T12:11:36.296122Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:94: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2026-01-10T12:11:36.296180Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:109: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.177777s |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> LocalPartitionReader::Retries >> LocalPartitionReader::Simple >> LocalPartitionReader::Retries [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> LocalPartitionReader::Simple [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> TGRpcCmsTest::SimpleTenantsTest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Retries [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TGRpcCmsTest::DisabledTxTest >> KqpStreamLookup::ReadTableDuringSplit >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 
2026-01-10T12:11:27.204813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704420526881685:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:27.206702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:27.243541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:27.644207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:27.860165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:27.860369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:27.875063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:27.986355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:27.988188Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704420526881650:2081] 1768047087161704 != 1768047087161707 2026-01-10T12:11:28.012279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.208598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:28.242081Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704420526881881:2095] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:28.242143Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704424821849731:2449] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:28.242251Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704420526881906:2105], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:28.242336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704420526882079:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704420526881906:2105], cookie# 1 2026-01-10T12:11:28.244114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420526882097:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420526882094:2206], cookie# 1 2026-01-10T12:11:28.244150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420526882098:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7593704420526882095:2206], cookie# 1 2026-01-10T12:11:28.244188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704420526882099:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420526882096:2206], cookie# 1 2026-01-10T12:11:28.244225Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704420526881618:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420526882097:2206], cookie# 1 2026-01-10T12:11:28.244254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704420526881621:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420526882098:2206], cookie# 1 2026-01-10T12:11:28.244270Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704420526881624:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704420526882099:2206], cookie# 1 2026-01-10T12:11:28.244327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420526882097:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526881618:2049], cookie# 1 2026-01-10T12:11:28.244358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420526882098:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526881621:2052], cookie# 1 2026-01-10T12:11:28.244376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704420526882099:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526881624:2055], cookie# 1 2026-01-10T12:11:28.244416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420526882079:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526882094:2206], cookie# 1 2026-01-10T12:11:28.244440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704420526882079:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:28.244459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420526882079:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526882095:2206], cookie# 1 2026-01-10T12:11:28.244494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704420526882079:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:28.244525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704420526882079:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704420526882096:2206], cookie# 1 2026-01-10T12:11:28.244540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704420526882079:2206][/dc-1] Sync cookie mismatch: sender# [1:7593704420526882096:2206], cookie# 1, current cookie# 0 2026-01-10T12:11:28.244585Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704420526881906:2105], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:28.257949Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704420526881906:2105], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704420526882079:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:28.258086Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704420526881906:2105], cacheItem# { Subscriber: { Subscriber: [1:7593704420526882079:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:28.265296Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704424821849732:2450], recipient# [1:7593704424821849731:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:28.265408Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704424821849731:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:28.312759Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704424821849731:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:28.317200Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704424821849731:2449] Handle TEvDescribeSchemeResult Forward to# [1:7593704424821849730:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:11:28.321165Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593704420526881881:2095] Handle TEvProposeTransaction 2026-01-10T12:11:28.321195Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593704420526881881:2095] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:11:28.321260Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593704420526881881:2095] Cookie# 0 userReqId# "" tx ... er# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.580460Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593704463920904770:2758] 2026-01-10T12:11:38.580473Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.612484Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7593704446741034563:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:38.612639Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593704446741034563:2109], cacheItem# { Subscriber: { Subscriber: [4:7593704463920904752:2756] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:38.612703Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593704446741034563:2109], cacheItem# { Subscriber: { Subscriber: [4:7593704463920904754:2757] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:38.612834Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593704463920904794:2761], recipient# [4:7593704463920904748:2359], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:38.613241Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7593704463920904748:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:38.620363Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593704463920904756:2757] 2026-01-10T12:11:38.620447Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620476Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593704463920904757:2757] 2026-01-10T12:11:38.620501Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620520Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7593704463920904758:2757] 2026-01-10T12:11:38.620552Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904754:2757][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620634Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593704463920904768:2758] 2026-01-10T12:11:38.620686Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620710Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# 
[4:7593704463920904769:2758] 2026-01-10T12:11:38.620736Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620753Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593704463920904770:2758] 2026-01-10T12:11:38.620766Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593704463920904759:2756] 2026-01-10T12:11:38.620773Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904755:2758][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620802Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620825Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593704463920904760:2756] 2026-01-10T12:11:38.620849Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:38.620869Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7593704463920904761:2756] 2026-01-10T12:11:38.620893Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593704463920904752:2756][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7593704446741034563:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> HttpRequest::Probe [GOOD] >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization >> TConfigsDispatcherTests::TestRemoveSubscription >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2026-01-10T12:11:28.372667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704423588465818:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:28.372722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:28.528173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:28.908891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:28.909035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:28.936180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:29.004796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:29.004915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:29.005247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:29.005298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:29.043852Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:11:29.044016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:29.073727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:29.171942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:29.221883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:29.240194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:29.402918Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:29.512777Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704423588466053:2145] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:29.512820Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704427883433822:2453] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:29.512933Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704423588466091:2162], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:29.513019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704423588466297:2306][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704423588466091:2162], cookie# 1 2026-01-10T12:11:29.514635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423588466352:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466349:2306], cookie# 1 2026-01-10T12:11:29.514694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423588466353:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466350:2306], cookie# 1 2026-01-10T12:11:29.514730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704423588466354:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466351:2306], cookie# 1 2026-01-10T12:11:29.514770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:7593704423588465697:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466352:2306], cookie# 1 2026-01-10T12:11:29.514799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704423588465700:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466353:2306], cookie# 1 2026-01-10T12:11:29.514818Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704423588465703:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704423588466354:2306], cookie# 1 2026-01-10T12:11:29.514860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423588466352:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588465697:2052], cookie# 1 2026-01-10T12:11:29.514877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423588466353:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588465700:2055], cookie# 1 2026-01-10T12:11:29.514904Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704423588466354:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588465703:2058], cookie# 1 2026-01-10T12:11:29.514943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423588466297:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588466349:2306], cookie# 1 2026-01-10T12:11:29.514983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704423588466297:2306][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:11:29.515012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423588466297:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588466350:2306], cookie# 1 2026-01-10T12:11:29.515033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704423588466297:2306][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:11:29.515058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704423588466297:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704423588466351:2306], cookie# 1 2026-01-10T12:11:29.515078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704423588466297:2306][/dc-1] Sync cookie mismatch: sender# [1:7593704423588466351:2306], cookie# 1, current cookie# 0 2026-01-10T12:11:29.515131Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704423588466091:2162], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:11:29.522283Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704423588466091:2162], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704423588466297:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 
Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:29.522486Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704423588466091:2162], cacheItem# { Subscriber: { Subscriber: [1:7593704423588466297:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:11:29.525617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704427883433823:2454], recipient# [1:7593704427883433822:2453], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:11:29.525704Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704427883433822:2453] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:11:29.536272Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:29.567982Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704427883433822:2453] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:11:29.571861Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704427883433822:2453] Handle TEvDescribeSchemeResult Forward to# [1:7593704427883433821:2452] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 720575940466 ... 
nd result: self# [3:7593704469754953803:2233], recipient# [3:7593704469754953800:2536], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.839900Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:39.921459Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704452575084387:2184], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.921605Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704452575084387:2184], cacheItem# { Subscriber: { Subscriber: [3:7593704461165019127:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:39.921704Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704469754953805:2234], recipient# [3:7593704469754953804:2539], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.934614Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704452575084387:2184], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.934765Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704452575084387:2184], cacheItem# { Subscriber: { Subscriber: [3:7593704461165019126:2210] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:39.934834Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704452575084387:2184], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.934893Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704452575084387:2184], cacheItem# { Subscriber: { Subscriber: [3:7593704461165019127:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:39.934950Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704452575084387:2184], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.935009Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704452575084387:2184], cacheItem# { Subscriber: { Subscriber: [3:7593704461165019127:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:39.935078Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704469754953809:2235], recipient# [3:7593704469754953806:2540], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2026-01-10T12:11:39.935164Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704469754953810:2236], recipient# [3:7593704469754953807:2541], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.935206Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704469754953811:2237], recipient# [3:7593704469754953808:2542], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.968462Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704423652476811:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:39.968615Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704423652476811:2107], cacheItem# { Subscriber: { Subscriber: [2:7593704427947444305:2122] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:39.968718Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704470897117347:2142], recipient# [2:7593704470897117346:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:40.192254Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704423652476811:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:40.192442Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704423652476811:2107], cacheItem# { Subscriber: { Subscriber: 
[2:7593704453717248116:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:40.192559Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704475192084646:2143], recipient# [2:7593704475192084645:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:21.095515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:21.231724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:21.242393Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:21.242889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:21.243005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:21.728967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:21.848876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:21.849017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:21.885362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:21.987526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:22.975476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:22.976909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:22.976973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:22.977013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:22.977802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:23.076749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.812618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:29.058431Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:29.075877Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:29.079790Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:29.144236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:29.144401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.196014Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:29.204596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.553179Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.554108Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.555173Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.560528Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.560833Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.561286Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.561533Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.561706Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.561930Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.753521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:08:29.789676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.789807Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.804668Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.997080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:30.050599Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:30.050751Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:30.110664Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:30.112852Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:30.113151Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:30.113237Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:30.113304Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:30.113374Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:30.113444Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:30.113510Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:30.114508Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:30.319725Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:30.319895Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:30.363343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:08:30.363885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:08:30.648867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:08:30.649370Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:08:30.652347Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:08:30.652413Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:08:30.653903Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:08:30.658516Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:30.662832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:30.677824Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:30.678044Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:30.731719Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:31.005564Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:31.318355Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:31.431876Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:31.432010Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:08:32.176913Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:12.837459Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:5454:4756], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:09:13.020389Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:5538:4805], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:09:13.058695Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5537:4804] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:13.417086Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:5559:4818]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:13.417473Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:09:13.417591Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:5561:4820] 2026-01-10T12:09:13.417693Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:5561:4820] 2026-01-10T12:09:13.418295Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5562:4821] 2026-01-10T12:09:13.418455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5562:4821], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:09:13.418534Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:09:13.418680Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5561:4820], server id = [2:5562:4821], tablet id = 72075186224037894, status = OK 2026-01-10T12:09:13.418790Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:09:13.418887Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5559:4818], StatRequests.size() = 1 2026-01-10T12:09:13.419045Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:10:36.142077Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwz08x0p3qvzkwzrv27ek1", SessionId: ydb://session/3?node_id=2&id=MzE5YjhiN2MtZWNlNDIyZmUtYzdhYWM4MDQtMzVmMzNlOTQ=, Slow query, duration: 83.399203s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:10:36.143795Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5427:4742], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:10:36.144101Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5427:4742], Start read next stream part 2026-01-10T12:10:36.144587Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37200, txId: 18446744073709551615] shutting down 2026-01-10T12:10:36.144900Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5417:2470], 
ActorId: [2:5747:4989], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:10:36.149368Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5427:4742], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:10:36.149446Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5427:4742], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTgwODM2MC1mYTJhNzEtOTYxM2QyZjQtZTZiYjljODA=, TxId: 2026-01-10T12:10:36.150163Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:10:36.150296Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], Start read next stream part 2026-01-10T12:10:36.301320Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5773:5011]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:10:36.301655Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:10:36.301709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5773:5011], StatRequests.size() = 1 2026-01-10T12:11:39.403558Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx1hqp440er563abnp2vqm", SessionId: ydb://session/3?node_id=2&id=ZmE4MGY1ZmYtNzNlYmQxMTQtNGE5YmYzNDgtOTY3ZmI0NzQ=, Slow query, duration: 63.248713s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:11:39.407584Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:11:39.407757Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], Start read next stream part 2026-01-10T12:11:39.409787Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37280, txId: 18446744073709551615] shutting down 2026-01-10T12:11:39.410597Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5927:5150], ActorId: [2:5931:5152], Starting query actor #1 [2:5932:5153] 2026-01-10T12:11:39.410681Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5931:5152], ActorId: [2:5932:5153], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:11:39.415465Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:11:39.415598Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5417:2470], ActorId: [2:5747:4989], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzE5NTJkNjEtNGEwZGVlMjUtNjU5MTM5MzEtMmRhZGY1Nw==, TxId: 2026-01-10T12:11:39.416931Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5931:5152], ActorId: [2:5932:5153], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MmU3ODUyZGMtZGI5M2UyYzctODcyMTA1NzAtZDc3N2ZjNzI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:11:39.517100Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5945:5165]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:11:39.517465Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:11:39.517526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5945:5165], StatRequests.size() = 1 2026-01-10T12:11:39.721648Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5931:5152], ActorId: [2:5932:5153], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmU3ODUyZGMtZGI5M2UyYzctODcyMTA1NzAtZDc3N2ZjNzI=, TxId: 2026-01-10T12:11:39.721757Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5931:5152], ActorId: [2:5932:5153], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmU3ODUyZGMtZGI5M2UyYzctODcyMTA1NzAtZDc3N2ZjNzI=, TxId: 2026-01-10T12:11:39.722310Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5927:5150], ActorId: [2:5931:5152], Got response [2:5932:5153] SUCCESS 2026-01-10T12:11:39.723263Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:11:39.758164Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:11:39.758268Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=πEzJ I, ActorId=[1:3775:3509] 2026-01-10T12:11:39.759851Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:5962:4165]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:11:39.760226Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:11:39.760304Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:11:39.760646Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:11:39.760721Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:11:39.760797Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:11:39.774859Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2026-01-10T12:08:22.003469Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703622922233288:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:22.003534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:22.221041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:22.388775Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703623391797356:2237];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:22.388861Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:22.729109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:22.740165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.096253Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.189617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:23.197211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.206899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:23.206951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:23.240999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:23.241100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:23.241228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:23.241258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:23.294527Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:23.294717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:23.305788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:23.394082Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:23.700200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:23.700973Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:23.718662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:23.718691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:23.718698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:23.724212Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:24.504045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:27.004476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703622922233288:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:27.004604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:27.391783Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593703623391797356:2237];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:27.391863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:28.227296Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:28.229030Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:28.229056Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:28.229097Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703648692038261:2326], Start check tables existence, number paths: 2 2026-01-10T12:08:28.231081Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703648692038261:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:28.231140Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703648692038261:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:28.231170Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703648692038261:2326], Successfully finished 2026-01-10T12:08:28.231233Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:28.231267Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MzIzZDI5MzktYmRmNDRhOC05NDk4OWJjZS00MjhkN2M2Yw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MzIzZDI5MzktYmRmNDRhOC05NDk4OWJjZS00MjhkN2M2Yw== temp_dir_name# 8c7fef7e-4d0c-1a79-4de8-cca2154e64df trace_id# 2026-01-10T12:08:28.231558Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MzIzZDI5MzktYmRmNDRhOC05NDk4OWJjZS00MjhkN2M2Yw==, ActorId: [1:7593703648692038282:2332], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:28.231815Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-10T12:08:28.276041Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703648692038289:2816], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:28.285232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:08:28.302079Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703648692038289:2816], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976720658 2026-01-10T12:08:28.308314Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703648692038289:2816], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:08:28.349139Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703648692038289:2816], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:08:28.416372Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703648692038289:2816], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:08:28.450267Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:28.452236Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:28.453178Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7593703649161601256:2299], Start check tables existence, number paths: 2 2026-01-10T12:08:28.457149Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:28.460458Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7593703649161601256:2299], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:28.460526Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7593703649161601256:2299], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:28.460564Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7593703649161601256:2299], Successfully finished 2026-01-10T12:08:28.460708Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:28.460735Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-10T12:08:28.529063Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703648692038367:2875] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/.metadata/workloa ... 
ad_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:11:38.754336Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ= temp_dir_name# b4bb30e4-4a1b-0b9a-f5c3-b9a4c73ee361 trace_id# 2026-01-10T12:11:38.755688Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:11:38.761746Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7593704465966615467:2324], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:11:38.761844Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7593704465966615467:2324], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:11:38.761892Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7593704465966615467:2324], Successfully finished 2026-01-10T12:11:38.761981Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:11:38.762335Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:11:38.769319Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:38.773132Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-10T12:11:38.775367Z node 14 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-10T12:11:38.785304Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:11:38.876248Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-10T12:11:38.880959Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7593704465966615548:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:38.881178Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7593704465966615497:2507], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-10T12:11:38.881799Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-10T12:11:38.881826Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-10T12:11:38.881908Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615555:2330], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-10T12:11:38.884166Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615555:2330], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-10T12:11:38.884252Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-10T12:11:38.884280Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-10T12:11:38.884674Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7593704465966615564:2331], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-10T12:11:38.886577Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7593704465966615564:2331], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-10T12:11:38.901495Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:11:38.901532Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-10T12:11:38.901592Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:11:38.901639Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615576:2333], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:11:38.901845Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: 
ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: ReadyState, LegacyTraceId: 01kekx3f0mbjh2ym93592d9940, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-10T12:11:38.904733Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615576:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:38.904888Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:38.905028Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-10T12:11:38.905083Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615585:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-10T12:11:38.905383Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7593704465966615585:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:38.905451Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:38.938749Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:38.944186Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7593704465966615564:2331], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-10T12:11:38.945744Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: ExecuteState, LegacyTraceId: 01kekx3f0mbjh2ym93592d9940, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [14:7593704465966615587:2329] workload_service_cleanup# false trace_id# 2026-01-10T12:11:38.955176Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: CleanupState, LegacyTraceId: 01kekx3f0mbjh2ym93592d9940, EndCleanup is_final# false trace_id# 2026-01-10T12:11:38.955307Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: CleanupState, LegacyTraceId: 01kekx3f0mbjh2ym93592d9940, Sent query response back to proxy proxy_request_id# 3 proxy_id# [14:7593704435901843785:2265] trace_id# 2026-01-10T12:11:38.989478Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:11:38.989549Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:11:38.989590Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:11:38.989636Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:11:38.989730Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=14&id=MzcwYjYxMjItZDRjY2NiZGQtOTc2MDc0YTEtZmYxOTFiYjQ=, ActorId: [14:7593704465966615480:2329], ActorState: unknown state, Session actor destroyed trace_id# |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/workload_service/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization [GOOD] >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2026-01-10T12:04:01.050130Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:04:01.072831Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:04:01.073121Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:04:01.073783Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:04:01.074131Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:04:01.076021Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:04:01.076098Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:04:01.077112Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2026-01-10T12:04:01.077157Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:04:01.077277Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:04:01.077425Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:04:01.087761Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:04:01.087808Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:04:01.089319Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.089473Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# 
DSP01 2026-01-10T12:04:01.089593Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.089708Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.089827Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.089968Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.090116Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.090144Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:04:01.090232Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 2026-01-10T12:04:01.090255Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2026-01-10T12:04:01.090288Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:04:01.090337Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:04:01.090985Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:04:01.091045Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:04:01.093764Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:04:01.093924Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:04:01.094260Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:04:01.094500Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:04:01.095182Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2026-01-10T12:04:01.095205Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:04:01.095247Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:04:01.095319Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:04:01.105012Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:04:01.105100Z node 2 :BS_PROXY NOTICE: 
dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:04:01.106755Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.106906Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107046Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107166Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107287Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107414Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107532Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:04:01.107554Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:04:01.107617Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2026-01-10T12:04:01.107643Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2026-01-10T12:04:01.107682Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:04:01.107744Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:04:01.108540Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:04:01.109015Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:04:01.109828Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2026-01-10T12:04:01.109889Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.110047Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:04:01.120226Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:04:01.120297Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.122142Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.122205Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:04:01.122769Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 
2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.122833Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:04:01.123207Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.123246Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:98:2089] 2026-01-10T12:04:01.123271Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:98:2089] 2026-01-10T12:04:01.123334Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:04:01.123640Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:04:01.123675Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:04:01.123867Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:04:01.124040Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:04:01.124108Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:04:01.124178Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:04:01.124288Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:54:2064] 2026-01-10T12:04 ... 
The tablet Unknown.65557.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456051Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65556.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456101Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65555.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456134Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65554.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456164Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65553.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456193Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65552.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456223Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65551.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456253Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65550.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456282Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65549.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456311Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65548.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456341Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65547.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456370Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65546.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456400Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65545.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456429Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65544.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456458Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65543.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456488Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65542.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456519Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65541.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456551Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65540.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456580Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65539.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456611Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65538.Leader.1 is 
not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456641Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65537.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.456672Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65536.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-10T12:08:16.457730Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} queued, type NKikimr::NHive::TTxProcessPendingOperations 2026-01-10T12:08:16.457805Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:08:16.457908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:08:16.457975Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:08:16.458108Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:514:2351] 2026-01-10T12:08:16.458142Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:514:2351] 2026-01-10T12:08:16.458912Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [1:146:2124] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:08:16.458973Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [1:146:2124] 2026-01-10T12:08:16.459112Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594037927937 Active! 
Generation: 3, Type: Hive started in 21msec Marker# TSYS24 2026-01-10T12:08:16.459148Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72057594037927937] Activate 2026-01-10T12:08:16.459311Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:514:2351] 2026-01-10T12:08:16.460325Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [1:514:2351] 2026-01-10T12:08:16.460387Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [1:514:2351] 2026-01-10T12:08:16.460523Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [1:101:2093] 2026-01-10T12:08:16.460572Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [1:101:2093] 2026-01-10T12:08:16.460611Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [1:101:2093] 2026-01-10T12:08:16.460723Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [1:514:2351] 2026-01-10T12:08:16.460830Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [1:100:2093] EventType# 268959744 2026-01-10T12:08:16.465460Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [1:514:2351] 2026-01-10T12:08:16.465516Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [1:514:2351] 2026-01-10T12:08:16.465547Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [1:514:2351] 2026-01-10T12:08:16.465599Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [1:431:2280] EventType# 268637702 2026-01-10T12:08:16.465757Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [2:98:2089] 2026-01-10T12:08:16.465794Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [2:98:2089] 2026-01-10T12:08:16.465819Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [2:98:2089] 2026-01-10T12:08:16.465918Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [2:98:2089] 2026-01-10T12:08:16.466736Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-01-10T12:08:16.466808Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:08:16.468212Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{22, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:08:16.468295Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:08:16.468564Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [2:97:2089] EventType# 268959744 
2026-01-10T12:08:16.468705Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-10T12:08:16.468767Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:08:16.469747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:16.470559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:16.470656Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{10, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-10T12:08:16.470737Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:08:16.471636Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-10T12:08:16.471688Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:08:16.471788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:16.471849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:16.471911Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{11, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-10T12:08:16.471953Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:08:16.472119Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-10T12:08:16.472168Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:08:16.472245Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{12, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:08:16.472297Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} Took 61.077899 seconds >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> PgCatalog::CheckSetConfig [FAIL] >> PgCatalog::PgDatabase+useSink >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> TConfigsDispatcherTests::TestSubscriptionNotification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] Test command err: 2026-01-10T12:11:43.646259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:43.646308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:43.873289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:45.402917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:45.402986Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:45.560225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota 
|87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2026-01-10T12:11:41.010285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704480242777293:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:41.010402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:41.321470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:41.335909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:41.336005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:41.353590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:41.436668Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:41.688119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:41.882931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:41.882957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:41.882969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:41.883051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:42.027748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:42.504409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:42.691904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 
281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2026-01-10T12:11:42.732859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |87.4%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> KqpSnapshotRead::TestReadOnly+withSink |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> KqpSinkMvcc::LostUpdate+IsOlap >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> KqpSinkTx::SnapshotRO >> KqpSinkMvcc::WriteSkewInsert+IsOlap >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] >> KqpPg::TempTablesDrop [FAIL] >> KqpPg::TempTablesWithCache |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2026-01-10T12:09:14.766480Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1768046954766445 2026-01-10T12:09:15.520655Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703849823829352:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:15.520738Z 
node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:15.534145Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005381s 2026-01-10T12:09:15.532239Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007711s 2026-01-10T12:09:15.587445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:15.712263Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:15.819279Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:09:16.275477Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.275652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:16.456993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.488206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:09:16.544339Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.548316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:16.731547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.731630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.739879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:16.739971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:16.781993Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:09:16.784156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.792492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:16.813601Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:16.876498Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:16.922976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:16.993157Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:17.431967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00229f/r3tmp/yandexJQ21Mz.tmp 2026-01-10T12:09:17.431993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00229f/r3tmp/yandexJQ21Mz.tmp 2026-01-10T12:09:17.432195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00229f/r3tmp/yandexJQ21Mz.tmp 2026-01-10T12:09:17.432287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:17.580539Z INFO: TTestServer started on Port 30416 GrpcPort 29661 PQClient connected to localhost:29661 2026-01-10T12:09:18.144041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:20.522639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703849823829352:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:20.522730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:09:22.170659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703879888601599:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.170905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.171527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703879888601636:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.171605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703879888601637:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.171760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:09:22.176541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:09:22.241791Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703879888601640:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-10T12:09:22.600957Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703879888601734:2982] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:22.649782Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703883471321949:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:22.651736Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=N2NjMjUzMTctYjc0MjQxNDQtOWQyMjIzMjktZjZhNWFkZTA=, ActorId: [2:7593703883471321906:2300], ActorState: ExecuteState, LegacyTraceId: 01kekwz9ma2aahbctjne28k7ax, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:22.648983Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703879888601748:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:09:22.651465Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjUwMThiYWMtYWVlZTFlNDgtNTMxNDZkYzktN2JjYmM3NmM=, ActorId: [1:7593703879888601595:2330], ActorState: ExecuteState, LegacyTraceId: 01kekwz9fqcfdmmj1twy4ettcf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:09:22.654634Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." ... 4037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.393822Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:11:44.393917Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2026-01-10T12:11:44.553615Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2026-01-10T12:11:44.553686Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:11:44.553697Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:11:44.553710Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:44.554100Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 160 WTime 1768047104553 2026-01-10T12:11:44.554333Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037892][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:11:44.554357Z node 4 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:11:44.554456Z node 4 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2026-01-10T12:11:44.565133Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7593703973239170783:2384] 2026-01-10T12:11:44.565214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:11:44.565282Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:11:44.565319Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-10T12:11:44.565354Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2026-01-10T12:11:44.565513Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:44.565529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.565541Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.565560Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.565571Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:44.565603Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:11:44.565638Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2026-01-10T12:11:44.565664Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2026-01-10T12:11:44.566121Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 160 2026-01-10T12:11:44.572171Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-10T12:11:44.576636Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:11:44.576847Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 11000000 } min_queue_wait_time { nanos: 160000000 } max_queue_wait_time { nanos: 160000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:11:44.576883Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2026-01-10T12:11:44.576914Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session: acknoledged message 1 2026-01-10T12:11:44.584172Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0 grpc read done: success: 0 data: 2026-01-10T12:11:44.584208Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0 grpc read failed 2026-01-10T12:11:44.584243Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0 grpc closed 2026-01-10T12:11:44.584257Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0 is DEAD 2026-01-10T12:11:44.584721Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:11:44.585220Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7593704407257884586:3162] destroyed 2026-01-10T12:11:44.585272Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:11:44.585300Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:44.585314Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.585329Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.585344Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.585355Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:44.589683Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2026-01-10T12:11:44.589783Z :ERROR: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2026-01-10T12:11:44.589822Z :ERROR: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2026-01-10T12:11:44.589858Z :INFO: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session will now close 2026-01-10T12:11:44.589940Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session: aborting 2026-01-10T12:11:44.625892Z :DEBUG: [/Root] TraceId [] SessionId [src_id|7f4eef91-53fb1189-715f22c-3e3f7470_0] MessageGroupId [src_id] Write session: destroy 2026-01-10T12:11:44.654996Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:44.655034Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.655048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.655073Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.655089Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:44.756529Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:44.756564Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.756576Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.756596Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.756610Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:44.908215Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:11:44.908246Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.908257Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:11:44.908274Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:11:44.908285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:11:45.416726Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [3:7593704497452198617:3299] TxId: 281474976710764. Ctx: { TraceId: 01kekx3msk3x0vy96271s6jsnc, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NDg2MWUzMGItYTFlYmJiODEtZDkwYjQ2ZGQtZTlmOTI4NjM=, PoolId: default, IsStreamingQuery: 0}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-10T12:11:45.417474Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593704497452198621:3299], TxId: 281474976710764, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx3msk3x0vy96271s6jsnc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NDg2MWUzMGItYTFlYmJiODEtZDkwYjQ2ZGQtZTlmOTI4NjM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593704497452198617:3299], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 |87.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2026-01-10T12:11:40.859245Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704473016396576:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:40.859616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:40.951281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:41.310585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:41.310726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:41.314379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:41.344878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:41.382234Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:41.429945Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:11:41.429985Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:11:41.642041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:41.655725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:41.655751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:41.655759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:41.655867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:41.858537Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:42.059201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:42.158888Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7593704481606332193:2302], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48310" } 2026-01-10T12:11:42.158938Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-10T12:11:42.158976Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.159003Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.159118Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48310" 2026-01-10T12:11:42.159275Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1768047102158962) 2026-01-10T12:11:42.190977Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1768047102158962 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-10T12:11:42.191494Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-10T12:11:42.196875Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-10T12:11:42.197961Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102158962&action=1" } } } 2026-01-10T12:11:42.200233Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.200333Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:11:42.200521Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:11:42.201045Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-10T12:11:42.201247Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:11:42.204415Z node 1 :CMS_TENANTS 
TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593704481606332203:2303], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102158962&action=1" } UserToken: "" } 2026-01-10T12:11:42.204443Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:11:42.204685Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102158962&action=1" } } 2026-01-10T12:11:42.213174Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-10T12:11:42.213243Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:11:42.213311Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7593704481606332198:2195], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:11:42.213340Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-10T12:11:42.213366Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.213376Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.213422Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-10T12:11:42.213440Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-10T12:11:42.213508Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-10T12:11:42.218030Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:11:42.218061Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.218071Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.218086Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.218144Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-10T12:11:42.218166Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1768047102158962 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:42.225176Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:11:42.235727Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.235816Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: 
TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-10T12:11:42.235827Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-10T12:11:42.243491Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:48310" 2026-01-10T12:11:42.246048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:11:42.248964Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-10T12:11:42.249049Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-01-10T12:11:42.254950Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2026-01-10T12:11:42.265793Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: ... 
- using supplied 72075186224037889 2026-01-10T12:11:43.321309Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-01-10T12:11:43.322192Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2026-01-10T12:11:43.322240Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:11:43.322316Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.322436Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7593704485901300140:2195], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.322473Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.322492Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.322508Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.322548Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-10T12:11:43.322575Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1768047103245352 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.322626Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047103245352 issue= 2026-01-10T12:11:43.324753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-10T12:11:43.324839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-10T12:11:43.324905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-10T12:11:43.324970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-10T12:11:43.325025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-10T12:11:43.329162Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:11:43.331482Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-10T12:11:43.331566Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: 
TenantName: "/Root/users/user-1" 2026-01-10T12:11:43.331587Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.331938Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593704473016396853:2200], Recipient [1:7593704477311364260:2195]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:11:43.331966Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:11:43.331990Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.331999Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.332021Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-10T12:11:43.332036Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1768047103245352 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.336861Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:11:43.339256Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.339328Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:11:43.339472Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:11:43.340187Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-10T12:11:43.340313Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-10T12:11:43.343665Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-10T12:11:43.343773Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7593704485901300268:2195], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-10T12:11:43.343815Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-10T12:11:43.343834Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.343843Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.343890Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-10T12:11:43.343911Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-10T12:11:43.345795Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:43.349064Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:11:43.349121Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.349131Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.349137Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.349200Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1768047103245352 2026-01-10T12:11:43.349216Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047103245352 issue= 2026-01-10T12:11:43.349227Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1768047103245352 issue= 2026-01-10T12:11:43.349235Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-10T12:11:43.349309Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1768047103245352 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.352112Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-10T12:11:43.352202Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.376342Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593704485901300293:2379], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047103245352&action=2" } UserToken: "" } 2026-01-10T12:11:43.376370Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:11:43.376521Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047103245352&action=2" ready: true status: SUCCESS } } 2026-01-10T12:11:43.397816Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593704485901300296:2381], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:48310" } 2026-01-10T12:11:43.397853Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:11:43.398010Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-10T12:11:43.400389Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, 
received event# 273285123, Sender [1:7593704485901300299:2382], Recipient [1:7593704477311364260:2195]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:48310" } 2026-01-10T12:11:43.400412Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-10T12:11:43.400649Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-10T12:11:43.413211Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:11:43.413430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:11:47.332252Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704481973532642:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:47.332357Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; |87.5%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] Test command err: 2026-01-10T12:11:47.970028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:47.970091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:48.260659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2026-01-10T12:11:40.704883Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704474618608005:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:40.704931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:40.851359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2026-01-10T12:11:41.344189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:41.375799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:41.375917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:41.395062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:41.499947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:41.517997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:41.804400Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:41.939261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:41.939285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:41.939292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:41.939395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:42.425511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:42.554269Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7593704483208543703:2302], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:11:42.554318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-10T12:11:42.554334Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.554347Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.554465Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:39602" 2026-01-10T12:11:42.554614Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1768047102554285) 2026-01-10T12:11:42.616502Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS 
coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1768047102554285 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-10T12:11:42.616767Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-10T12:11:42.622314Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-10T12:11:42.623136Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102554285&action=1" } } } 2026-01-10T12:11:42.623300Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.623369Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:11:42.623538Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:11:42.624130Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-10T12:11:42.624265Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:11:42.625377Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7593704483208543703:2302], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102554285&action=1" } UserToken: "" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:11:42.625408Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2026-01-10T12:11:42.625649Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3434: Add subscription to /Root/users/user-1 for [1:7593704483208543703:2302] 2026-01-10T12:11:42.625723Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3442: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047102554285&action=1" } } 2026-01-10T12:11:42.634259Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-10T12:11:42.634321Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 
2026-01-10T12:11:42.634420Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7593704483208543708:2202], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:11:42.634461Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-10T12:11:42.634475Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.634483Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.634523Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-10T12:11:42.634544Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-10T12:11:42.634611Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-10T12:11:42.639145Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:11:42.639182Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:42.639189Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.639201Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:42.639266Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-10T12:11:42.639286Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1768047102554285 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:42.646286Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:11:42.658845Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:42.663021Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-10T12:11:42.665221Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-10T12:11:42.672767Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:39602" 2026-01-10T12:11:42.674618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:11:42.678525Z node 1 :CMS_TENANTS DEBUG: 
console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-10T12:11:42.678607Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-10T12:11:42.682935Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-10T12:11:42.713647Z node 1 :CMS_TENANTS DEBUG: console_tenants_ma ... 186224037888 shardIdx 72057594046644480:2 2026-01-10T12:11:43.892412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2026-01-10T12:11:43.892465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-01-10T12:11:43.892620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-10T12:11:43.894456Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2026-01-10T12:11:43.894490Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:11:43.894561Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.894728Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7593704487503511634:2202], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.894756Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:11:43.894784Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.894801Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.894837Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-10T12:11:43.894863Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1768047103837376 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.894937Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047103837376 issue= 2026-01-10T12:11:43.908888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-10T12:11:43.908995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 
72057594046644480:6 2026-01-10T12:11:43.909085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-10T12:11:43.909152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-10T12:11:43.909248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-10T12:11:43.913833Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:11:43.914212Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-10T12:11:43.915870Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-10T12:11:43.915911Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.916192Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593704478913575703:2220], Recipient [1:7593704478913575792:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:11:43.916231Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:11:43.916259Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.916271Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.916298Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-10T12:11:43.916327Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1768047103837376 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.921629Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:11:43.921707Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.921750Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:11:43.921943Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:11:43.922571Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-10T12:11:43.922653Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool 
request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-10T12:11:43.926126Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-10T12:11:43.926220Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7593704487503511765:2202], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-10T12:11:43.926260Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-10T12:11:43.926274Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.926290Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.926337Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-10T12:11:43.926358Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-10T12:11:43.932546Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [d1e6b3e7be7931c1] Result# TEvPutResult {Id# [72075186224037888:1:16:0:0:121:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2026-01-10T12:11:43.932724Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.239 sample PartId# [72075186224037888:1:16:0:0:121:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2026-01-10T12:11:43.969706Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:11:43.972147Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:11:43.972182Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:43.972189Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:11:43.972284Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1768047103837376 2026-01-10T12:11:43.972301Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047103837376 issue= 2026-01-10T12:11:43.972326Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1768047103837376 issue= 2026-01-10T12:11:43.972337Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-10T12:11:43.972428Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1768047103837376 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:11:43.989404Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-10T12:11:43.996611Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2422: Send /Root/users/user-1 notification to [1:7593704487503511610:2371]: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047103837376&action=2" ready: true status: SUCCESS } } 2026-01-10T12:11:44.001903Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:11:44.030088Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593704491798479085:2373], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:11:44.030125Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:11:44.030264Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-10T12:11:44.035149Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7593704491798479088:2374], Recipient [1:7593704478913575792:2202]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:11:44.035188Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-10T12:11:44.035425Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-10T12:11:44.040627Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:11:44.040853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |87.5%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> KqpTx::RollbackManyTx |87.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] Test command err: 2026-01-10T12:11:47.866397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:47.866457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:48.164088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:50.105649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:50.105721Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:50.264450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |87.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |87.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 |87.6%| [TA] $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::ExplicitTcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2026-01-10T12:11:35.419280Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704454665709032:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:35.419332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:35.968288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:36.034963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:36.035056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:36.049837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:36.127112Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:36.244185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:36.424248Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:36.424318Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704454665709161:2140], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:36.424675Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593704454665709161:2140], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-10T12:11:36.425274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:11:36.428062Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704454665708807:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704458960676918:2443] 2026-01-10T12:11:36.428133Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704454665708807:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 
2026-01-10T12:11:36.428239Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704454665708807:2051] Subscribe: subscriber# [1:7593704458960676918:2443], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:36.428327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704454665708810:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704458960676919:2443] 2026-01-10T12:11:36.428334Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704454665708810:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:36.428384Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704454665708810:2054] Subscribe: subscriber# [1:7593704458960676919:2443], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:36.428416Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593704454665708813:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7593704458960676920:2443] 2026-01-10T12:11:36.428422Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593704454665708813:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-10T12:11:36.428440Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593704454665708813:2057] Subscribe: subscriber# [1:7593704458960676920:2443], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:11:36.428591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704458960676918:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704454665708807:2051] 2026-01-10T12:11:36.428618Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704458960676919:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704454665708810:2054] 2026-01-10T12:11:36.428638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593704458960676920:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704454665708813:2057] 2026-01-10T12:11:36.428673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704458960676915:2443] 2026-01-10T12:11:36.428716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704458960676916:2443] 2026-01-10T12:11:36.428764Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7593704454665709161:2140], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: 
there are 0 elements } 2026-01-10T12:11:36.428790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7593704458960676917:2443] 2026-01-10T12:11:36.428815Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7593704458960676914:2443][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7593704454665709161:2140], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:11:36.428874Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704454665709155:2138] Handle TEvNavigate describe path dc-1 2026-01-10T12:11:36.428953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704454665708807:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704458960676918:2443] 2026-01-10T12:11:36.428986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704454665708810:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704458960676919:2443] 2026-01-10T12:11:36.428998Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593704454665708813:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593704458960676920:2443] 2026-01-10T12:11:36.429075Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704454665709161:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-10T12:11:36.429158Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593704454665709161:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593704458960676914:2443] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:11:36.432181Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704458960676922:2445] HANDLE EvNavigateScheme dc-1 2026-01-10T12:11:36.437279Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704454665709161:2140], cacheItem# { Subscriber: { Subscriber: [1:7593704458960676914:2443] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:36.437386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704454665709161:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true 
SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:36.437491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704454665709410:2301][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704454665709161:2140], cookie# 1 2026-01-10T12:11:36.437588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704454665709458:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704454665709455:2301], cookie# 1 2026-01-10T12:11:36.437605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704454665709459:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704454665709456:2301], cookie# 1 2026-01-10T12:11:36.437643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704454665709460:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704454665709457:2301], cookie# 1 2026-01-10T12:11:36.437677Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704454665708807:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704454665709458:2301], cookie# 1 2026-01-10T12:11:36.437754Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704458960676923:2446], recipient# [1:7593704458960676913:2287], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446 ... nt: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:49.480720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704510957056050:5461], recipient# [3:7593704510957056048:4637], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:49.855184Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704459417438917:2227], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:49.855319Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704459417438917:2227], cacheItem# { Subscriber: { Subscriber: [3:7593704476597308259:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:49.855412Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704510957056052:5462], recipient# [3:7593704510957056051:4638], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.232806Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704486331511521:2228], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.232965Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704486331511521:2228], cacheItem# { Subscriber: { Subscriber: [2:7593704512101315558:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:50.233068Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704516396282982:2351], recipient# [2:7593704516396282981:2555], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.260375Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704486331511521:2228], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.260549Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704486331511521:2228], cacheItem# { Subscriber: { Subscriber: [2:7593704512101315558:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:50.260615Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704486331511521:2228], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.260672Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704486331511521:2228], cacheItem# { Subscriber: { Subscriber: [2:7593704512101315558:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:50.260715Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704486331511521:2228], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.260790Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704486331511521:2228], cacheItem# { Subscriber: { Subscriber: [2:7593704512101315559:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:50.260832Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593704486331511521:2228], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.260905Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593704486331511521:2228], cacheItem# { 
Subscriber: { Subscriber: [2:7593704512101315558:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:11:50.260977Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704516396282988:2353], recipient# [2:7593704516396282984:2557], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.261039Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704516396282989:2354], recipient# [2:7593704516396282985:2558], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.261085Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704516396282990:2355], recipient# [2:7593704516396282986:2559], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:11:50.261177Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593704516396282987:2352], recipient# [2:7593704516396282983:2556], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> KqpTx::RollbackTx >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> KqpTx::CommitRequired >> KqpLocks::Invalidate >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink |87.6%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestAttributes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> TConsoleTests::TestAttributesExtSubdomain >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> TableCreation::SimpleUpdateTable [GOOD] >> TableCreation::RollbackTableAcl >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2026-01-10T12:10:27.726437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:27.726513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:28.080617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:29.110778Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:29.111358Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:29.112052Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6415960466126660295 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:29.282017Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:10:29.287038Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:10:29.287217Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:10:29.405743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:10:29.584668Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:29.585259Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:29.585545Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmp17nk8D/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13630162886924848666 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunk ... Weight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:43.980681Z node 144 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:11:43.981234Z node 144 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:11:43.981394Z node 144 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7193847911497366837 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:44.021781Z node 138 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:11:44.022396Z node 138 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:11:44.022634Z node 138 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2535885028227822744 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:44.081771Z node 137 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:11:44.082309Z node 137 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:11:44.082528Z node 137 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpkr8vKG/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12151890771274839377 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:44.551553Z node 136 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:44.551657Z node 136 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:44.570288Z node 136 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2026-01-10T12:11:49.369087Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:49.369223Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:49.490227Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:50.553697Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:11:50.554373Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpiRsGkX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:11:50.554673Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpiRsGkX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019a4/r3tmp/tmpiRsGkX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12700937172437801517 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:53.355156Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:53.355252Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:53.480860Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> IncrementalBackup::ComplexBackupSequenceWithDataVerification [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault >> KqpStreamLookup::ReadTableDuringSplit [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |87.6%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::TestReadOnly-withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |87.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::TempTablesWithCache [FAIL] >> KqpPg::TableDeleteWhere+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:11:44.723573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:44.855696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:44.868330Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:44.869007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:44.869178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:45.435462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:45.435639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:45.442999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047100983163 != 1768047100983166 2026-01-10T12:11:45.464021Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:45.529884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:45.651567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:46.180421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:46.197082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:46.324329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:46.846376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:46.846696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:964:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:46.847332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:46.848343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:969:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:46.848603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:46.865885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:47.047956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:47.244981Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1025:2858] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:57.605653Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx3prnemced1e67zezrsyv", SessionId: ydb://session/3?node_id=1&id=Y2I2N2U0OTAtYWFkOGVlZmUtOTRkNWI2ZWQtMzFiMTY0Mzc=, Slow query, duration: 10.767804s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), 
(205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), 
(442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), 
(679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), 
(916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] 
|87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::InteractiveTx >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:11:45.578021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:45.759821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:45.769163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:45.769648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:45.769770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:46.569272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:46.569459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:46.591619Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047102132314 != 1768047102132317 2026-01-10T12:11:46.621881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:46.697617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:46.842957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:47.220257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:47.237557Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:47.366493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:47.762467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1000:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:47.762603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:47.762677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:47.763778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1016:2858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:47.764204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:47.773033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:47.950056Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1014:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:48.057301Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1073:2896] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:03.437226Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx3qnfdzxyqx0cvjsw40fq", SessionId: ydb://session/3?node_id=1&id=OGIxZjNiOWMtYzkxZDhkODgtNGRkMjBkOC1hMDdlNTZmMA==, Slow query, duration: 15.676898s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), 
(205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), 
(442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), 
(679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), 
(916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> KqpLocks::Invalidate [GOOD] >> KqpLocks::DifferentKeyUpdate |87.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink [GOOD] >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> KqpTx::DeferredEffects >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> TableCreation::RollbackTableAcl [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> ExternalIndex::Simple [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::RollbackTableAcl [GOOD] Test command err: 2026-01-10T12:11:35.417912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704454404853506:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:35.418132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:35.872235Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704454404853390:2081] 1768047095360484 != 1768047095360487 2026-01-10T12:11:35.895064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:35.895184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:35.931067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:35.950088Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:36.344354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:36.344377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:36.344384Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:36.344454Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:36.411557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:36.417752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:11:36.455466Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:39.430499Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:11:39.442540Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:11:39.442602Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:11:39.442624Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:11:39.449785Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Describe result: PathErrorUnknown 2026-01-10T12:11:39.449817Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Creating table 2026-01-10T12:11:39.449869Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:11:39.449953Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Describe result: PathErrorUnknown 2026-01-10T12:11:39.449971Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Creating table 2026-01-10T12:11:39.450001Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:11:39.450921Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Describe result: PathErrorUnknown 2026-01-10T12:11:39.450929Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Creating table 2026-01-10T12:11:39.450964Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:11:39.455368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.470772Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:11:39.470827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Subscribe on create table tx: 281474976715659 2026-01-10T12:11:39.473147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.484509Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Subscribe on tx: 281474976715659 registered 2026-01-10T12:11:39.484608Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:11:39.484631Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Subscribe on create table tx: 281474976715658 2026-01-10T12:11:39.497496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.505221Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Subscribe on tx: 281474976715658 registered 2026-01-10T12:11:39.505365Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:11:39.505387Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Subscribe on create table tx: 281474976715660 2026-01-10T12:11:39.508063Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. 
Subscribe on tx: 281474976715660 registered 2026-01-10T12:11:39.683330Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Request: create. Transaction completed: 281474976715659. Doublechecking... 2026-01-10T12:11:39.750814Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2026-01-10T12:11:39.761436Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-10T12:11:39.761499Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Column diff is empty, finishing 2026-01-10T12:11:39.762434Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:11:39.763471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:39.766507Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:11:39.766525Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7593704471584723408:2493] Owner: [1:7593704471584723402:2490]. Successful alter request: ExecComplete 2026-01-10T12:11:39.769429Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593704471584723407:2492] Owner: [1:7593704471584723402:2490]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2026-01-10T12:11:39.820480Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-10T12:11:39.820577Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. Column diff is empty, finishing 2026-01-10T12:11:39.820679Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704471584723406:2491] Owner: [1:7593704471584723402:2490]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:11:39.821705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:39.823523Z node 1 :K ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-10T12:12:09.179877Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTEzMTFmYTctZDgyNzNiOGMtYTJkMDQxZWQtNmYxNmFlZDQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7593704600558338201:2522] 2026-01-10T12:12:09.179906Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7593704600558338203:2963] 2026-01-10T12:12:09.192654Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7593704600558338202:2523], selfId: [3:7593704549018729118:2260], source: [3:7593704600558338201:2522] 2026-01-10T12:12:09.193535Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704600558338198:2961], ActorId: [3:7593704600558338199:2962], TraceId: ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=OTEzMTFmYTctZDgyNzNiOGMtYTJkMDQxZWQtNmYxNmFlZDQ=, TxId: 2026-01-10T12:12:09.194185Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704600558338198:2961], ActorId: [3:7593704600558338199:2962], TraceId: ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=OTEzMTFmYTctZDgyNzNiOGMtYTJkMDQxZWQtNmYxNmFlZDQ=, TxId: 2026-01-10T12:12:09.194216Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704600558338198:2961], ActorId: [3:7593704600558338199:2962], TraceId: ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-10T12:12:09.194343Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704600558338197:2960], ActorId: [3:7593704600558338198:2961], TraceId: ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7, RequestDatabase: /dc-1, Got response [3:7593704600558338199:2962] SUCCESS 2026-01-10T12:12:09.194411Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704600558338196:2959] ActorId: [3:7593704600558338197:2960] Database: /dc-1 ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7. 
Extracted script execution operation [3:7593704600558338199:2962], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7593704579083501075:2676], LeaseGeneration: 0 2026-01-10T12:12:09.194435Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704600558338196:2959] ActorId: [3:7593704600558338197:2960] Database: /dc-1 ExecutionId: 75ccf627-eda463ce-a1f76934-c967dcd7. Reply success 2026-01-10T12:12:09.200255Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OTEzMTFmYTctZDgyNzNiOGMtYTJkMDQxZWQtNmYxNmFlZDQ=, workerId: [3:7593704600558338201:2522], local sessions count: 0 2026-01-10T12:12:09.249542Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx4cn0ekpppmag2v54kn1y", Request has 18444976026580.302102s seconds to be completed 2026-01-10T12:12:09.251899Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx4cn0ekpppmag2v54kn1y", Created new session, sessionId: ydb://session/3?node_id=3&id=MjBjMjA4ZDItNWRlMjdkMy1hNzNmZWNlOC0yNjQ4M2VmNQ==, workerId: [3:7593704600558338232:2536], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:12:09.252140Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx4cn0ekpppmag2v54kn1y 2026-01-10T12:12:09.273215Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx4cnrfnycjgp0g4c235d4, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MjBjMjA4ZDItNWRlMjdkMy1hNzNmZWNlOC0yNjQ4M2VmNQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7593704600558338232:2536] 2026-01-10T12:12:09.273269Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7593704600558338235:2972] 2026-01-10T12:12:09.288612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:12:09.295623Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx4cnrfnycjgp0g4c235d4", Forwarded response to sender actor, requestId: 42, sender: [3:7593704600558338234:2537], selfId: [3:7593704549018729118:2260], source: [3:7593704600558338232:2536] --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:12:09.317298Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Describe result: PathErrorUnknown 2026-01-10T12:12:09.317323Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Creating table 2026-01-10T12:12:09.317382Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-10T12:12:09.333879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:09.337236Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-10T12:12:09.337285Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Subscribe on create table tx: 281474976710687 2026-01-10T12:12:09.342658Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Subscribe on tx: 281474976710687 registered 2026-01-10T12:12:09.406136Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-01-10T12:12:09.472420Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:12:09.472461Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593704600558338246:2979] Owner: [3:7593704600558338245:2978]. Column diff is empty, finishing 2026-01-10T12:12:09.512499Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx4cx32fbp3ntgkcd88r8c", Request has 18444976026580.039153s seconds to be completed 2026-01-10T12:12:09.515104Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx4cx32fbp3ntgkcd88r8c", Created new session, sessionId: ydb://session/3?node_id=3&id=Y2ZmNWEzZmEtODY2MjJhNzctMzgwMzZkNmEtZGUxYjExNDY=, workerId: [3:7593704600558338375:2546], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:12:09.515317Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx4cx32fbp3ntgkcd88r8c 2026-01-10T12:12:09.550291Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593704600558338381:3083] Owner: [3:7593704600558338380:3082]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:12:09.550327Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593704600558338381:3083] Owner: [3:7593704600558338380:3082]. Column diff is empty, finishing 2026-01-10T12:12:09.550400Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7593704600558338381:3083] Owner: [3:7593704600558338380:3082]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-10T12:12:09.551292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:12:09.553046Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593704600558338381:3083] Owner: [3:7593704600558338380:3082]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:12:09.553060Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7593704600558338381:3083] Owner: [3:7593704600558338380:3082]. Successful alter request: ExecComplete 2026-01-10T12:12:09.571702Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=Y2ZmNWEzZmEtODY2MjJhNzctMzgwMzZkNmEtZGUxYjExNDY=, workerId: [3:7593704600558338375:2546], local sessions count: 1 2026-01-10T12:12:09.573771Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx4cz50hs4jf71b9n693xp", Request has 18444976026579.977874s seconds to be completed 2026-01-10T12:12:09.576203Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx4cz50hs4jf71b9n693xp", Created new session, sessionId: ydb://session/3?node_id=3&id=ZDUyZTNiODgtYzYwZDQ2MTgtY2UwYmUzYzEtMWE5OGEzNGQ=, workerId: [3:7593704600558338393:2550], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:12:09.576425Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx4cz50hs4jf71b9n693xp 2026-01-10T12:12:09.619863Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZDUyZTNiODgtYzYwZDQ2MTgtY2UwYmUzYzEtMWE5OGEzNGQ=, workerId: [3:7593704600558338393:2550], local sessions count: 1 2026-01-10T12:12:09.647676Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MjBjMjA4ZDItNWRlMjdkMy1hNzNmZWNlOC0yNjQ4M2VmNQ==, workerId: [3:7593704600558338232:2536], local sessions count: 0 |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::SnapshotROInteractive2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:07:33.556219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:07:33.813722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:07:33.894143Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:289:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:07:33.895141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:07:33.895217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2026-01-10T12:07:33.895343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:07:35.134657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:35.134829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:35.477893Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046848353027 != 1768046848353031 2026-01-10T12:07:35.549579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:35.636014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:35.855142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:07:38.513591Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-10T12:07:38.513919Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-10T12:07:38.515218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:38.520224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:38.520312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:38.527778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:38.528386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:38.564681Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:38.784836Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-01-10T12:07:38.784928Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:07:38.788273Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:907:2769] 2026-01-10T12:07:38.983556Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:907:2769] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "new_column1" Type: "Uint64" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:07:38.983683Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:907:2769] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:07:38.987213Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:07:38.989074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:907:2769] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:07:38.989748Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:07:38.989976Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:907:2769] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 
72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:07:38.990142Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:907:2769] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:07:39.010705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-10T12:07:39.016294Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:907:2769] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:07:39.017311Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:907:2769] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:07:39.017405Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:907:2769] txid# 281474976715657 SEND to# [1:906:2768] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:07:39.215193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:07:39.256421Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:07:39.256834Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-10T12:07:39.280240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:07:39.280666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:07:39.281036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:07:39.281196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:07:39.281368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:07:39.281524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:07:39.281636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:07:39.281753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:07:39.281900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:07:39.282058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:07:39.282183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:07:39.282318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:07:39.282468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:07:39.320897Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-01-10T12:07:39.321554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:07:39.321643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:07:39.321966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: ... olumn: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-10T12:11:34.041530Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:34.041630Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716216 ProcessProposeKqpTransaction 2026-01-10T12:11:34.055413Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:34.055498Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716217 ProcessProposeKqpTransaction 2026-01-10T12:11:34.223099Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:34.223185Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716218 ProcessProposeKqpTransaction 2026-01-10T12:11:34.247570Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:34.247651Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716219 ProcessProposeKqpTransaction 2026-01-10T12:11:34.461648Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:11:34.461814Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:969:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:11:34.461885Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2819];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:11:34.461958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:978:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;EXPECTATION=0 2026-01-10T12:11:44.609854Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:11251:10591], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:11:44.614684Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTI2MDRiZGYtYjBiNjM3MTktZWMyMzM5YzQtODg5ZjRjMTQ=, ActorId: [1:11247:10588], ActorState: ExecuteState, LegacyTraceId: 01kekx3mhw20y0n1j7a6hnaszp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-10T12:11:45.448666Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:45.448779Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716220 ProcessProposeKqpTransaction 2026-01-10T12:11:45.474206Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:45.474297Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716221 ProcessProposeKqpTransaction 2026-01-10T12:11:45.708065Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:45.708317Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716222 ProcessProposeKqpTransaction 2026-01-10T12:11:45.720170Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:45.720248Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716223 ProcessProposeKqpTransaction 2026-01-10T12:11:45.933652Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:11:45.933829Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:969:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:11:45.933904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2819];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:11:45.933981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:978:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;EXPECTATION=0 2026-01-10T12:11:56.925654Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:56.925762Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716224 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;RESULT=;EXPECTATION=1 2026-01-10T12:11:57.627521Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:57.627599Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716225 ProcessProposeKqpTransaction 2026-01-10T12:11:57.647424Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:57.647505Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716226 ProcessProposeKqpTransaction 2026-01-10T12:11:57.921490Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:57.921583Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716227 
ProcessProposeKqpTransaction 2026-01-10T12:11:57.966102Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:11:57.966193Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716228 ProcessProposeKqpTransaction 2026-01-10T12:11:58.198629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:11:58.198820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:969:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:11:58.198889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2819];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:11:58.198960Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:978:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;EXPECTATION=1 2026-01-10T12:12:08.965634Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:12:08.965737Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716229 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 2026-01-10T12:12:09.833911Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:12:09.833995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716230 ProcessProposeKqpTransaction 2026-01-10T12:12:09.868299Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:12:09.868383Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716231 ProcessProposeKqpTransaction 2026-01-10T12:12:10.167935Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:12:10.168018Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716232 ProcessProposeKqpTransaction 2026-01-10T12:12:10.204926Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-10T12:12:10.205009Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976716233 ProcessProposeKqpTransaction 2026-01-10T12:12:10.413929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:964:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:12:10.414110Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:969:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:12:10.414182Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2819];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 
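[Editor's aside, not part of the captured log] The ExternalIndex::Simple output above and below repeatedly prints `REQUEST=SELECT COUNT(*) FROM ...;RESULT=...;EXPECTATION=0|1`, i.e. the test issues a COUNT query against a metadata path and checks whether it is expected to succeed (table present) or fail with a scheme error (table absent). The sketch below is a minimal, hypothetical reproduction of that pattern using the ydb Python SDK; it is not the test's actual code (the real test is a C++ unittest), and the endpoint, helper name, and assertion layout are assumptions. Only the table paths are taken from the log.

# Minimal sketch of the REQUEST/EXPECTATION pattern seen in the log above.
# Assumptions: a reachable YDB endpoint (hypothetical address below), database "/Root",
# and the `ydb` Python SDK installed. Table paths are copied from the log output.
import ydb


def count_rows_or_error(pool: ydb.SessionPool, table_path: str):
    """Return (row_count, None) on success, or (None, error) when the query fails,
    e.g. with a scheme error for a table that does not exist."""
    query = f"SELECT COUNT(*) AS cnt FROM `{table_path}`;"

    def run(session):
        # Execute in an auto-committed transaction and return the result sets.
        return session.transaction(ydb.SerializableReadWrite()).execute(
            query, commit_tx=True
        )

    try:
        result_sets = pool.retry_operation_sync(run)
        return result_sets[0].rows[0].cnt, None
    except ydb.Error as e:  # base SDK error; covers scheme errors among others
        return None, e


driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical endpoint
driver.wait(timeout=5)
with ydb.SessionPool(driver) as pool:
    # EXPECTATION=1 in the log: the table should exist and the query should succeed.
    cnt, err = count_rows_or_error(pool, "/Root/.metadata/initialization/migrations")
    assert err is None, err
    # EXPECTATION=0 in the log: the index table is absent, so an error is expected.
    _, err = count_rows_or_error(pool, "/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1")
    assert err is not None
driver.stop()

[End of aside; the captured log continues below.]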
2026-01-10T12:12:10.414246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:978:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |87.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpSinkMvcc::WriteSkewInsert+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpTx::RollbackTx2 [GOOD] >> KqpTx::SnapshotRO ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] Test command err: 2026-01-10T12:10:22.562826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:22.562897Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:22.881372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:24.397041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:10:24.654521Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.655145Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.655902Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9339432659803737462 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.660130Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:10:24.783822Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.784407Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.784649Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1087553967956024704 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.849870Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.850535Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.850848Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8628959375876853376 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.911919Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.916526Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.920282Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmp415dNv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2895903405599531435 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInF ... 
st: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-10T12:10:51.161042Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1612:2838] txid# 281474976715660, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-10T12:10:51.161645Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-10T12:10:51.207132Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1671:2882] txid# 281474976715661, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-10T12:10:51.207569Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-10T12:10:51.297659Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1696:2904] txid# 281474976715662, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-10T12:10:51.298077Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-10T12:10:53.827528Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:53.827598Z node 55 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:53.973239Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:57.663904Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:57.663978Z node 64 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:57.814336Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:01.343042Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:01.343147Z node 73 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:01.462424Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:04.903976Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:04.904068Z node 82 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:05.050321Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:06.050243Z node 89 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:11:06.051020Z node 89 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmpAs49Dw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:11:06.051324Z node 89 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmpAs49Dw/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0019c9/r3tmp/tmpAs49Dw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11585702969553584170 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:11:55.363397Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:55.363495Z node 91 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:55.514112Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:57.566449Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:57.566553Z node 92 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:57.777999Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:00.173231Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:00.173350Z node 93 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:00.314440Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:02.482859Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-10T12:12:02.482965Z node 94 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:02.821971Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:05.261160Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:05.261279Z node 95 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:05.398075Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:07.152613Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:07.152727Z node 96 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:07.274735Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:09.030478Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:09.030585Z node 97 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:09.237487Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:11.315431Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:11.317406Z node 98 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:11.496760Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 >> TBackupCollectionTests::DisallowedPath >> KqpSinkMvcc::LostUpdate+IsOlap [GOOD] >> KqpSinkMvcc::LostUpdate-IsOlap >> TBackupCollectionTests::HiddenByFeatureFlag >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::ParallelCreate >> KqpTx::RollbackRoTx [GOOD] >> KqpTx::RollbackInvalidated >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::DropTwice |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit [GOOD] >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkLocks::EmptyRange >> KqpTx::CommitRoTx_TLI [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpTx::InvalidateOnError [GOOD] >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> TBackupCollectionTests::CreateTwice [GOOD] >> 
TBackupCollectionTests::Drop >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink [GOOD] >> KqpRollback::DoubleUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] Test command err: 2026-01-10T12:11:35.259870Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704453925318731:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:35.259964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:35.505940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:35.506056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:35.624250Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704453925318476:2081] 1768047095175850 != 1768047095175853 2026-01-10T12:11:35.637873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:35.699158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:36.061322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:36.061360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:36.061369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:36.061483Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:36.129363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:36.140123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:36.260272Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:38.989978Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:11:39.002266Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:11:39.002322Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:11:39.002341Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:11:39.016840Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. 
Describe result: PathErrorUnknown 2026-01-10T12:11:39.016867Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Creating table 2026-01-10T12:11:39.016924Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:11:39.017179Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Describe result: PathErrorUnknown 2026-01-10T12:11:39.017186Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Creating table 2026-01-10T12:11:39.017204Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:11:39.020292Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Describe result: PathErrorUnknown 2026-01-10T12:11:39.020308Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Creating table 2026-01-10T12:11:39.020336Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:11:39.023116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.035785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.039655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:39.055668Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:11:39.055742Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. 
Subscribe on create table tx: 281474976710659 2026-01-10T12:11:39.055999Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:11:39.056016Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Subscribe on create table tx: 281474976710660 2026-01-10T12:11:39.061092Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:11:39.061148Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Subscribe on create table tx: 281474976710658 2026-01-10T12:11:39.061953Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Subscribe on tx: 281474976710660 registered 2026-01-10T12:11:39.062038Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Subscribe on tx: 281474976710659 registered 2026-01-10T12:11:39.062058Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:11:39.183266Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:11:39.234874Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:11:39.234934Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:11:39.267629Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:11:39.267683Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Column diff is empty, finishing 2026-01-10T12:11:39.268781Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:11:39.269805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:39.271376Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:11:39.271389Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7593704466810221195:2489] Owner: [1:7593704466810221193:2487]. Successful alter request: ExecComplete 2026-01-10T12:11:39.316465Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-10T12:11:39.316550Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Column diff is empty, finishing 2026-01-10T12:11:39.316646Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593704466810221194:2488] Owner: [1:7593704466810221193:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:11:39.316691Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7593704466810221196:2490] Owner: [1:7593704466810221193:2487]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-10T12:11:39.316711Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_ ... atus: SUCCESS. 
Issues: 2026-01-10T12:12:20.715886Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [3:7593704644708007144:2713], ActorId: [3:7593704644708007145:2714], TraceId: ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [3:7593704644708007146:2715] SUCCESS 2026-01-10T12:12:20.716338Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjRmNzlmNDMtZmEzMWU4Ny1iZjkxNDI1My05ZmUxNTk3ZQ==, workerId: [3:7593704640413039723:2675], local sessions count: 3 2026-01-10T12:12:20.716619Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZDU4MzE3YjctOWFkNmJiY2UtYjM1ZDgxYmMtYTJlYzM4MDU=, workerId: [3:7593704644708007148:2717], local sessions count: 2 2026-01-10T12:12:20.725948Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 95, sender: [3:7593704644708007195:2730], selfId: [3:7593704580283495443:2233], source: [3:7593704644708007194:2729] 2026-01-10T12:12:20.726970Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704644708007189:3308], ActorId: [3:7593704644708007190:3309], TraceId: ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ODUzOGU1ZjAtNDJkMWY3ZTEtOTc1YWZjNGUtZTQ4Y2IxNTI=, TxId: 2026-01-10T12:12:20.727698Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704644708007189:3308], ActorId: [3:7593704644708007190:3309], TraceId: ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ODUzOGU1ZjAtNDJkMWY3ZTEtOTc1YWZjNGUtZTQ4Y2IxNTI=, TxId: 2026-01-10T12:12:20.727736Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704644708007189:3308], ActorId: [3:7593704644708007190:3309], TraceId: ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-10T12:12:20.727869Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593704644708007188:3307], ActorId: [3:7593704644708007189:3308], TraceId: ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a, RequestDatabase: /dc-1, Got response [3:7593704644708007190:3309] SUCCESS 2026-01-10T12:12:20.727936Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704644708007187:3306] ActorId: [3:7593704644708007188:3307] Database: /dc-1 ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a. Extracted script execution operation [3:7593704644708007190:3309], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7593704640413039693:3231], LeaseGeneration: 0 2026-01-10T12:12:20.727966Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593704644708007187:3306] ActorId: [3:7593704644708007188:3307] Database: /dc-1 ExecutionId: c862d4d5-2cb20228-1f84fca7-7b7d31a. 
Reply success 2026-01-10T12:12:20.728752Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ODUzOGU1ZjAtNDJkMWY3ZTEtOTc1YWZjNGUtZTQ4Y2IxNTI=, workerId: [3:7593704644708007194:2729], local sessions count: 1 2026-01-10T12:12:20.778409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:12:20.778446Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:21.743151Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx4rvedm02nrgdhhev4d8r, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 96, targetId: [3:7593704627528137342:2558] 2026-01-10T12:12:21.743207Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 96 timeout: 300.000000s actor id: [3:7593704649002974526:3326] 2026-01-10T12:12:21.764598Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974530:3328], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.764650Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974530:3328], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.767089Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593704649002974527:2741], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:21.767678Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, ActorId: [3:7593704627528137342:2558], ActorState: ExecuteState, LegacyTraceId: 01kekx4rvedm02nrgdhhev4d8r, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:21.767834Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx4rvedm02nrgdhhev4d8r", Forwarded response to sender actor, requestId: 96, sender: [3:7593704649002974525:2740], selfId: [3:7593704580283495443:2233], source: [3:7593704627528137342:2558] 2026-01-10T12:12:21.782312Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx4rwn3jn1zjkhz38r1gtt, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 97, targetId: [3:7593704627528137342:2558] 2026-01-10T12:12:21.782358Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 97 timeout: 300.000000s actor id: [3:7593704649002974533:3329] 2026-01-10T12:12:21.800789Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974537:3331], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.800826Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974537:3331], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.803736Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593704649002974534:2744], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_execution_leases]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:21.804643Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, ActorId: [3:7593704627528137342:2558], ActorState: ExecuteState, LegacyTraceId: 01kekx4rwn3jn1zjkhz38r1gtt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_execution_leases]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:21.804845Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx4rwn3jn1zjkhz38r1gtt", Forwarded response to sender actor, requestId: 97, sender: [3:7593704649002974532:2743], selfId: [3:7593704580283495443:2233], source: [3:7593704627528137342:2558] 2026-01-10T12:12:21.819303Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx4rxt7b091tn3wszc33sx, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 98, targetId: [3:7593704627528137342:2558] 2026-01-10T12:12:21.819353Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 98 timeout: 300.000000s actor id: [3:7593704649002974540:3332] 2026-01-10T12:12:21.836378Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974544:3334], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.836405Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7593704649002974544:3334], for# root@builtin, access# DescribeSchema 2026-01-10T12:12:21.838849Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593704649002974541:2747], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/result_sets]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:21.840465Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, ActorId: [3:7593704627528137342:2558], ActorState: ExecuteState, LegacyTraceId: 01kekx4rxt7b091tn3wszc33sx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/result_sets]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:21.840629Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx4rxt7b091tn3wszc33sx", Forwarded response to sender actor, requestId: 98, sender: [3:7593704649002974539:2746], selfId: [3:7593704580283495443:2233], source: [3:7593704627528137342:2558] 2026-01-10T12:12:21.867807Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YjEzNDcyMWItOGQ3NTAxMzItMjk0ZjM2N2ItYzVhYTE0MmI=, workerId: [3:7593704627528137342:2558], local sessions count: 0 |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> KqpTx::SnapshotRO [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 24948, MsgBus: 30205 2026-01-10T12:11:54.828287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:54.860277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:55.051142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:55.051240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:55.052763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:55.153317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:55.160590Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:55.160901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704531894956301:2081] 1768047113863978 != 1768047113863981 2026-01-10T12:11:55.160944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:55.286530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:55.286578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:55.286589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:55.286675Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:56.204225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:56.220589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:56.327242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:56.603842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:56.825059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:56.994772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:59.534708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704557664761955:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.534900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.544252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704557664761965:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.544361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.975223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.017764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.056194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.094740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.157777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.205365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.243610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.309594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.382679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561959730137:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.382746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.382842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561959730142:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.382946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561959730143:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.383010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.386747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:00.397927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704561959730146:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:00.458961Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704561959730197:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:03.856250Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZmQ5ODRjZTktODhiYzMxY2ItNTQzYTNjNjMtMWJjODVlMTg=, ActorId: [1:7593704574844632407:2535], ActorState: ReadyState, LegacyTraceId: 01kekx47c492kbzrq4c2a0fz1v, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekx46npepg923jwc1075799" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 23263, MsgBus: 15861 2026-01-10T12:12:05.270883Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704580222581614:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:05.271270Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: flin ... 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.917079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.923576Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:17.126797Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.217652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.021135Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704647604062320:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.021232Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.022152Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704647604062330:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.022217Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.099504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.145385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.194003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.238212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.284865Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.327946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.368132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.434492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.537331Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704647604063198:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.537456Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.537968Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704647604063203:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.538015Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704647604063204:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.538052Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.542815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:20.564954Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-10T12:12:20.565258Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704647604063207:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:20.653932Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704647604063258:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:22.649072Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-10T12:12:22.649268Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:12:22.649392Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:12:22.649564Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7593704656193998195:2532], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [3:7593704656193998170:2532]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[3:7593704656193998195:2532].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:12:22.650279Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593704656193998189:2532], SessionActorId: [3:7593704656193998170:2532], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7593704656193998170:2532]. 2026-01-10T12:12:22.650561Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MmNjYTY3NmEtYzNmNDIzOWItMWU1NzQxY2QtZTZkODljODQ=, ActorId: [3:7593704656193998170:2532], ActorState: ExecuteState, LegacyTraceId: 01kekx4smf2sj2a4w0vf8jkp2z, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7593704656193998190:2532] from: [3:7593704656193998189:2532] trace_id# 2026-01-10T12:12:22.651045Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593704656193998190:2532] TxId: 281474976715673. Ctx: { TraceId: 01kekx4smf2sj2a4w0vf8jkp2z, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MmNjYTY3NmEtYzNmNDIzOWItMWU1NzQxY2QtZTZkODljODQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:12:22.651415Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MmNjYTY3NmEtYzNmNDIzOWItMWU1NzQxY2QtZTZkODljODQ=, ActorId: [3:7593704656193998170:2532], ActorState: ExecuteState, LegacyTraceId: 01kekx4smf2sj2a4w0vf8jkp2z, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KeyValue`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-10T12:12:22.750835Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MmNjYTY3NmEtYzNmNDIzOWItMWU1NzQxY2QtZTZkODljODQ=, ActorId: [3:7593704656193998170:2532], ActorState: ExecuteState, LegacyTraceId: 01kekx4srd6jbqt29nr35e7drh, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekx4sm07s6btmh7hctsnckf" issue_code: 2015 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 64086, MsgBus: 15915 2026-01-10T12:11:55.315713Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704538071571069:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:55.323837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:55.350532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:55.903556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:55.903720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:55.938000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:55.965092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:56.051421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.216558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:56.356454Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:56.504913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:56.504937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:56.504951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-10T12:11:56.505036Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:57.373763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:57.389788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:57.481655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.784348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.169139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.281875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.316285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704538071571069:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:00.316362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:01.386775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704563841376676:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.386876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.387414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704563841376686:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.387470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.906406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.987904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.042123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.085057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.125972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.207716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.293031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.404503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.493778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704568136344863:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.493857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.494040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704568136344868:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.494095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704568136344869:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.494126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.498138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:02.510396Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704568136344872:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:02.594629Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704568136344925:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2794, MsgBus: 6370 2026-01-10T12:12:06.228187Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists. ... 15.604292Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704625031255667:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:15.604410Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:15.616026Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:15.770317Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:15.773380Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704625031255450:2081] 1768047135542996 != 1768047135542999 2026-01-10T12:12:15.834932Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:15.835044Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:15.836741Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:15.840096Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:15.975262Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:15.984576Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:15.984593Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:15.984599Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:15.984652Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:16.543718Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:16.596783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:16.608986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:16.623408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.696777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.927158Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.028987Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:19.795111Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704642211126504:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:19.795200Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:19.795549Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704642211126514:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:19.795595Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:19.877495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:19.930278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:19.970466Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.010124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.045395Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.104821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.142791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.210732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.316476Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704646506094682:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.316588Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.316910Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704646506094687:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.316994Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.317031Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704646506094688:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.321851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:20.342818Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704646506094691:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:20.439409Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704646506094742:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:20.590263Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704625031255667:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:20.590357Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 29370, MsgBus: 30166 2026-01-10T12:11:50.101117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704517079698553:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:50.101527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:50.493474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:50.501026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:50.501175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:50.610649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:50.629655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:50.831305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:50.860333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:50.860367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:50.860383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:50.860473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:51.104855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:51.362486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:55.097780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704517079698553:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:55.097859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:55.382406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704538554535751:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.382556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.383598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704538554535763:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.383691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704538554535764:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.383865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.396146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:55.410775Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704538554535767:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:55.588697Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704538554535818:2542] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:56.087827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:56.293364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.801046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.009228Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjM2NWI5N2YtNWQ0MDIxZWEtYTJjZGE3ZjItMzdjMjliNjY=, ActorId: [1:7593704560029380252:2967], ActorState: ExecuteState, LegacyTraceId: 01kekx45fm8st955ktb9rvwfr9, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 29 } issue_code: 2008 severity: 1 } trace_id#
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 28021, MsgBus: 24069 2026-01-10T12:12:03.578806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:03.578911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:03.743118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:03.743204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:03.745731Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:03.768243Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704574135980075:2081] 1768047123490022 != 1768047123490025 2026-01-10T12:12:03.792472Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:03.921001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:03.924767Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:03.924789Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:03.924796Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:03.924872Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:04.560367Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:04.694448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:04.705150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:08.251545Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704595610817431:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.251705Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.252284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704595610817451:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.257860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:08.260223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704595610817480:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.260314Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.262884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704595610817483:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.263154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.280025Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593704595610817453:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:08.343355Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593704595610817508:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:08.452379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.516419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:09.922879Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 24222, MsgBus: 26847 2026-01-10T12:12:13.658404Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:13.658593Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:13.916164Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:13.961723Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704615357843074:2081] 1768047133547242 != 1768047133547245 2026-01-10T12:12:13.979490Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:14.060705Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:14.060827Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:14.149394Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:14.360832Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:14.360856Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:14.360864Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-10T12:12:14.360985Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:14.516200Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:14.655501Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:15.078577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:18.408508Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704636832680459:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.408576Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704636832680441:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.408718Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.420260Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704636832680464:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.420459Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.421695Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704636832680491:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.421828Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:18.422632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:18.451217Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704636832680463:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:18.512304Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704636832680518:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:18.638844Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:18.697851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:19.858143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::LostUpdate-IsOlap [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::BackupAbsentCollection |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 10048, MsgBus: 65090 2026-01-10T12:11:54.737793Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704532540617648:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:54.762107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:55.344288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:55.372300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:55.372403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:55.538731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-10T12:11:55.609622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:55.616199Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704532540617608:2081] 1768047114701600 != 1768047114701603 2026-01-10T12:11:55.633450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:55.783383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:55.890934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:55.890958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:55.890969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:55.891059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:56.641858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:56.777721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.023223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.374699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.495681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:59.744225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704532540617648:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:59.744311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:00.990586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704558310423269:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.990719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.991202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704558310423279:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.991243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.390013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.441763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.490875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.552286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.592959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.659131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.799723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.904623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.066216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704566900358757:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.066312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.066968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704566900358762:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.067051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704566900358763:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.067087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.073271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:02.096627Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704566900358766:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:02.170423Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704566900358817:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:04.667980Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=Njc5MDM5MjgtMjYyMGIyNzMtYWEyZDIxODAtNThiYmUyYTI=, ActorId: [1:7593704575490293728:2540], ActorState: ReadyState, LegacyTraceId: 01kekx485bdb07e4jwsmc3rpe9, Create QueryResponse for error on request, msg: status# NOT_FOUND issue ... undelivered_message; 2026-01-10T12:12:16.377733Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:16.378973Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704627772619155:2081] 1768047136182269 != 1768047136182272 2026-01-10T12:12:16.379120Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:16.416665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:16.416744Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:16.428027Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:16.612608Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:16.612631Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:16.612638Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:16.612705Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:16.630313Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:17.212466Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:17.324753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:17.331034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:17.340190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.426082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.584627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.697958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:20.906868Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704644952490203:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.906987Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.907561Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704644952490213:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.907617Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.008139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.052548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.108460Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.163783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.189961Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704627772619260:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:21.190037Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:21.208989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.270612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.365684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.416224Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.492597Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704649247458386:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.492732Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.492800Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704649247458391:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.494143Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704649247458393:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.494222Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:21.498440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:21.511941Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704649247458394:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:21.596103Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704649247458446:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:24.318944Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NDVkMjBkYjAtNWQ4NmJjNzAtYzk2NWY1ZDktMTllOTU2NWY=, ActorId: [3:7593704657837393360:2535], ActorState: ExecuteState, LegacyTraceId: 01kekx4v8xcsnwaeba002vbxhs, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 25 } issue_code: 2008 severity: 1 } trace_id#
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> TestScript::StepMerging >> TBackupCollectionTests::DropEmptyBackupCollection [GOOD] >> TBackupCollectionTests::DropNonExistentCollection >> TestScript::StepMerging [GOOD] >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> KqpTx::RollbackInvalidated [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> KqpTx::EmptyTxOnCommit [GOOD] >> KqpTx::CommitStats >> TBackupCollectionTests::DropNonExistentCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithMultipleBackups >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |87.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, 
gRPC: 6525, MsgBus: 5499 2026-01-10T12:11:52.062126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704525774210591:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:52.062293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:52.466516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:52.466603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:52.557565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:52.568168Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704525774210464:2081] 1768047112049924 != 1768047112049927 2026-01-10T12:11:52.582545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:52.583286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:52.786763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:52.786791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:52.786797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:52.786871Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:52.794338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:53.080236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:53.649238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:53.771342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.046208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.489018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.631988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.064166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704525774210591:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:57.064234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:57.138186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704547249048829:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:57.138306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:57.138759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704547249048839:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:57.138798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:57.497401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.566402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.611393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.668219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.722320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.784285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.844019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.953277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.103360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704551544017015:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.103448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.103553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704551544017020:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.103952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704551544017022:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.104029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.108987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:58.122637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704551544017023:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:11:58.195170Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704551544017075:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:07.440187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:12:07.440214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 31673, MsgBus: 8865 2026-01-10T12:12:10.261409Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exi ... rvice] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:20.434384Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:20.434407Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:20.434413Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:20.434496Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:21.117482Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:21.155120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:21.163430Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:21.170413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.263532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.488200Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.550273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.617456Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704663580741895:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.617583Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.618045Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704663580741905:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.618109Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.700248Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.748256Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.792403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.839740Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.889731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.968700Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:25.032178Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:25.093825Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:25.131764Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704646400870892:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:25.137452Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:25.217776Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667875710071:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.217857Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.218161Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667875710076:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.218203Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667875710077:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.218237Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.221597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:25.233694Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704667875710080:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:25.290886Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704667875710131:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:27.214273Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593704676465645067:2545], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:27.216563Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YjBkYmUxNmUtNzg1ZDY2YzEtNzVlNDI4OGItYmViZGU5NzU=, ActorId: [3:7593704676465645041:2533], ActorState: ExecuteState, LegacyTraceId: 01kekx4y5y65gp6wjnaagmf7be, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/BadTable]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# 01kekx4y5f317c0dv0ct0kwf7k trace_id# 2026-01-10T12:12:27.234665Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjBkYmUxNmUtNzg1ZDY2YzEtNzVlNDI4OGItYmViZGU5NzU=, ActorId: [3:7593704676465645041:2533], ActorState: ReadyState, LegacyTraceId: 01kekx4y727wx70zd1tzr8vjfd, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekx4y5f317c0dv0ct0kwf7k" issue_code: 2015 severity: 1 } trace_id# |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 27138, MsgBus: 12735 2026-01-10T12:11:55.690436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704537105138136:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:55.690644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:55.807148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown 2026-01-10T12:11:56.265854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:56.265947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:56.289544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:56.293705Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704537105137970:2081] 1768047115643655 != 1768047115643658 2026-01-10T12:11:56.302991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:56.303258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.564187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:56.610956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:56.610973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:56.610977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:56.611027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:56.694495Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:57.359827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:57.546769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.837753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.131023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.259322Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.696211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704537105138136:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:00.696288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:01.837244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704562874943630:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.837376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.844238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704562874943640:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.844335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.359577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.421782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.472605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.560142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.611585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.687708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.774997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.854130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:03.008552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704571464879112:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:03.008673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:03.008981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704571464879118:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:03.009007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704571464879117:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:03.009072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:03.016016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:03.030288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704571464879121:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:03.115935Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704571464879180:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:09.405146Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id= ... accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:20.372381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:20.471977Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:20.476238Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704646482548225:2081] 1768047140302134 != 1768047140302137 2026-01-10T12:12:20.507586Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:20.507686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:20.520503Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:20.640700Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:20.756725Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:20.756747Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:20.756756Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:20.756837Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:21.332603Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:21.537571Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:21.544984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:21.553050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.625130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.862691Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.957502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.478765Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704663662419279:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.478901Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.480398Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704663662419289:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.480473Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:24.549245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.584758Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.623199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.657654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.748930Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.792806Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.842231Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.910815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:25.061738Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667957387463:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.061829Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.062105Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667957387469:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.062144Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704667957387468:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.062176Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.065629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:25.081126Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704667957387472:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:25.153813Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704667957387523:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:25.305969Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704646482548267:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:25.306058Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:28.181620Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZGMxMTk3NGQtOWYxMGFjMDMtZjQzYzgzZmItMmVmNjg1Yg==, ActorId: [3:7593704676547322404:2523], ActorState: ExecuteState, LegacyTraceId: 01kekx4z0z72tk3qvzemhd219k, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TwoShard`" issue_code: 2001 severity: 1 } trace_id# |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 21190, MsgBus: 23624 2026-01-10T12:11:49.300914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704511789864246:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:49.300981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:49.878376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:49.878472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:49.963569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:49.980254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704511789864023:2081] 1768047109252725 != 1768047109252728 2026-01-10T12:11:49.980322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:49.991550Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:50.160030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:50.160055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:50.160171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:50.160285Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:50.257648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:50.284913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:51.064587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:51.076788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:51.188402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:51.421635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:51.632353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:51.733256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:53.794172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704528969735085:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.794320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.794795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704528969735095:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.794843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.304733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704511789864246:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:54.304809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:54.386403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.457111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.504621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.543008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.582754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.642488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.759200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:54.849224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:55.084457Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704537559670578:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.084540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.084991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704537559670583:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.085032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704537559670584:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.085150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:55.089822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:55.111144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704537559670587:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:11:55.230297Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704537559670638:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61050, MsgBus: 28926 2026-01-10T12:12:00.082518Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsAct ... 57:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:13.508019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:13.527337Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:13.647757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:13.889045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:14.046529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.167487Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704613936769951:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:17.167561Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:17.417853Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704635411608266:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.417937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.418267Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704635411608276:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.418313Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.498941Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.544307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.579030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.612197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.649470Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.697499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.748457Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.811244Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:17.925787Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704635411609149:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.925912Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.926363Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704635411609154:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.926442Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704635411609155:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.926573Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:17.931404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:17.948841Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704635411609158:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:18.008262Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704639706576505:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:27.444225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:12:27.444252Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:28.728817Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7593704682656250091:2654], TxId: 281474976715680, task: 1. Ctx: { CheckpointId : . TraceId : 01kekx4z9y4h428e8mc8cx2ska. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZWY2MWM3MGItZmE5YzcyM2YtZGNiNDZlOWMtNjA3NmIxNmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1768047140104/18446744073709551615 shard 72075186224037888 with lowWatermark v1768047140622/18446744073709551615 (node# 3 state# Ready) } } 2026-01-10T12:12:28.729418Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593704682656250091:2654], TxId: 281474976715680, task: 1. Ctx: { CheckpointId : . TraceId : 01kekx4z9y4h428e8mc8cx2ska. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZWY2MWM3MGItZmE5YzcyM2YtZGNiNDZlOWMtNjA3NmIxNmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1768047140104/18446744073709551615 shard 72075186224037888 with lowWatermark v1768047140622/18446744073709551615 (node# 3 state# Ready) } }. 2026-01-10T12:12:28.730120Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593704682656250092:2655], TxId: 281474976715680, task: 2. Ctx: { CheckpointId : . TraceId : 01kekx4z9y4h428e8mc8cx2ska. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZWY2MWM3MGItZmE5YzcyM2YtZGNiNDZlOWMtNjA3NmIxNmE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593704682656250087:2535], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-10T12:12:28.730771Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWY2MWM3MGItZmE5YzcyM2YtZGNiNDZlOWMtNjA3NmIxNmE=, ActorId: [3:7593704648296511417:2535], ActorState: ExecuteState, LegacyTraceId: 01kekx4z9y4h428e8mc8cx2ska, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 38 has no snapshot at v1768047140104/18446744073709551615 shard 72075186224037888 with lowWatermark v1768047140622/18446744073709551615 (node# 3 state# Ready)" severity: 1 } } trace_id# >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TBackupCollectionTests::DropCollectionWithMultipleBackups [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables >> KqpSinkMvcc::UpdateColumns+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns-IsOlap |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> PgCatalog::PgTables [GOOD] >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2026-01-10T12:10:20.661030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704129354242203:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:20.661083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:21.690488Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:21.762914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:21.763019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:21.802034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-10T12:10:21.802166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:21.807064Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:21.835861Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:62618) connection closed with error: Connection refused 2026-01-10T12:10:21.839105Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:10:21.850770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:21.969891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:25.789066Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704152081187847:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:25.789443Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:25.927058Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:26.136001Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:26.171870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:26.171957Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:26.188899Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:28426) connection closed with error: Connection refused 2026-01-10T12:10:26.192392Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:10:26.208786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:26.228158Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:26.550728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:26.758453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:30.722144Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704175133468831:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:30.722198Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:30.762338Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:30.972242Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.020261Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704175133468737:2081] 1768047030717737 != 1768047030717740 2026-01-10T12:10:31.041469Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:31.041584Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:31.044253Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:31.094881Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:26525) connection closed with error: Connection refused 2026-01-10T12:10:31.098520Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:10:31.100520Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:31.163326Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:31.748229Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:36.032856Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704199279497479:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:36.032925Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:36.103518Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:36.184193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:36.398774Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593704199279497440:2081] 1768047036013552 != 1768047036013555 2026-01-10T12:10:36.398890Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:36.456274Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:10:36.479919Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:36.480059Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:36.484504Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:19935) connection closed with error: Connection refused 2026-01-10T12:10:36.496421Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:10:36.522924Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:36.957430Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:37.072160Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:41.305575Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593704222637440692:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:41.305628Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:10:41.344206Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:10:41.490849Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:41.496230Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593704222637440649:2081] 1768047041299882 != 1768047041299885 2026-01-10T12:10:41.515191Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:41.515287Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:41.518049Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:24020) connection closed with error: Connection refused 2026-01-10T12:10:41.519085Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:10:41.529845Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:41.591396Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:10:42.324549Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:10:45.835118Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593704236121402077:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:10:45.835163Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=ac ... 6-01-10T12:11:47.751842Z node 18 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:27806) connection closed with error: Connection refused 2026-01-10T12:11:47.752541Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:47.752645Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:47.752962Z node 18 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:11:47.797882Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:47.938543Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:48.594886Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:54.612351Z node 19 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7593704536563941362:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:54.612462Z node 19 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:55.072471Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:11:55.081142Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:55.113426Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:55.113542Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:55.114843Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:63342) connection closed with error: Connection refused 2026-01-10T12:11:55.117321Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:11:55.140282Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:55.358446Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:11:55.660445Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:02.709359Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7593704570342888629:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:02.709471Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:02.726114Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:02.850930Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:02.851384Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:02.871299Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:12061) connection closed with error: Connection refused 2026-01-10T12:12:02.872144Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:12:02.876201Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:02.876322Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:02.912895Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:03.110788Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:03.722534Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:10.484197Z node 21 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7593704601459540334:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:10.484379Z node 21 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:10.560247Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:10.690636Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:10.694038Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [21:7593704601459540198:2081] 1768047130398751 != 1768047130398754 2026-01-10T12:12:10.709996Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:10.710129Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:10.747573Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:124: 
(#34,[::1]:27344) connection closed with error: Connection refused 2026-01-10T12:12:10.748916Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:10.749331Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:12:10.823395Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:11.484339Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:18.251382Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7593704636148576834:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:18.251507Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:18.279205Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:18.468166Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:18.472342Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:18.476435Z node 22 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [22:7593704636148576795:2081] 1768047138250630 != 1768047138250633 2026-01-10T12:12:18.498379Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:18.508057Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:18.562503Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#36,[::1]:1581) connection closed with error: Connection refused 2026-01-10T12:12:18.569338Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:12:18.602041Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:18.727580Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:19.260224Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:25.038549Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7593704666708788151:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:25.038634Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:25.235235Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:25.263467Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:25.263595Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:25.310964Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:25.313914Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:25.315510Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#38,[::1]:24902) connection closed with error: Connection refused 2026-01-10T12:12:25.317834Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:12:25.440214Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:26.080270Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:12:28.832530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:12:28.832631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:28.832691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:12:28.832734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:12:28.832771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:12:28.832800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:12:28.832858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:28.832955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:12:28.833923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:12:28.834266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:12:28.938909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:28.938979Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:28.960422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:12:28.960551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:12:28.960752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:12:29.004018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:12:29.004312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:12:29.005121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-10T12:12:29.005373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:12:29.008001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:29.008216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:12:29.009546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:12:29.009608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:29.009695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:12:29.009738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:12:29.009793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:12:29.010064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:12:29.170984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.174849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.175957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.176015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:29.176094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 01-10T12:12:33.321186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-10T12:12:33.321272Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-10T12:12:33.321342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:12:33.321394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:12:33.321603Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:12:33.321792Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:12:33.321864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:12:33.325733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:12:33.326608Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-10T12:12:33.326654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:12:33.326817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:12:33.327002Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:33.327038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:12:33.327075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:12:33.327760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:12:33.327810Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:12:33.327927Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:12:33.327967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:12:33.328027Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:12:33.328062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:12:33.328127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:12:33.328170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:12:33.328209Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:12:33.328250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:12:33.328402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:12:33.328443Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:12:33.328482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:12:33.328513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:12:33.332625Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:12:33.332726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:12:33.332763Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:12:33.332809Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:12:33.332853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:12:33.333929Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:12:33.334005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:12:33.334055Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:12:33.334082Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:12:33.334112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:12:33.334186Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:12:33.337466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:12:33.337730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:12:33.337923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:12:33.337968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:12:33.338390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:12:33.338485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got 
EvNotifyTxCompletionResult 2026-01-10T12:12:33.338525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:981:2911] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } TestModificationResults wait txId: 105 2026-01-10T12:12:33.341918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:12:33.342169Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:12:33.342371Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid number of child partitions: 1, at schemeshard: 72057594046678944 2026-01-10T12:12:33.344574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid number of child partitions: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:12:33.344836Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid number of child partitions: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:12:33.345148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:12:33.345189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:12:33.345561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:12:33.345654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:12:33.345690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:988:2918] TestWaitNotification: OK eventTxId 105 |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> 
XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> KqpRollback::DoubleUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdate |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |87.7%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 |87.7%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBackupCollectionTests::DropCollectionWithFullBackup [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup >> KqpTx::CommitPrepared [GOOD] >> TLocksTest::Range_IncorrectDot1 >> KqpTx::CommitStats [GOOD] >> TLocksFatTest::RangeSetBreak >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> TCancelTx::CrossShardReadOnly >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::CreatePartitionStream >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] Test command err: Trying to start YDB, gRPC: 18972, MsgBus: 30820 2026-01-10T12:12:28.395156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704680956440846:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:28.395502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:28.423569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:28.820126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:28.830111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:28.830208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:28.864583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:29.028508Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704680956440741:2081] 1768047148375568 != 1768047148375571 2026-01-10T12:12:29.034777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:29.104565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:29.124836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:29.124860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:29.124875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:29.124956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:29.397621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:29.662859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:29.670532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:29.732145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.868797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:30.062336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:30.132150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.126572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704698136311820:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.126728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.127127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704698136311830:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.127189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.504658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.561525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.596688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.629680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.679217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.713170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.758209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.813007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.901302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704698136312709:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.901375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.901578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704698136312715:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.901585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704698136312714:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.901628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.908840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:32.930119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704698136312718:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:33.008828Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704702431280065:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:33.383026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704680956440846:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:33.384462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:34.722910Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:12:34.723051Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C9D43E16598 2026-01-10T12:12:34.723094Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593704706726247658:2533], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kekx55h1ehb5w1w12780yyj1, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJhZjY5M2EtOWExOTYwNWUtNWQ0MDNlODctZDRjMmYzNDY=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:12:34.723236Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:12:34.723284Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593704706726247658:2533], queueSize: 1 2026-01-10T12:12:34.723976Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593704706726247658:2533], compileActor: [1:7593704706726247666:2538] 2026-01-10T12:12:34.724014Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:12:34.724115Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593704706726247666:2538], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-10T12:12:34.723980Z 2026-01-10T12:12:34.870871Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7593704706726247666:2538]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047154","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"520d9b87-b253fc60-6abc5692-b4addadf","version":"1.0"} 2026-01-10T12:12:34.871263Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593704706726247666:2538], duration: 0.147265s 2026-01-10T12:12:34.871298Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593704706726247666:2538], owner: [1:7593704698136311779:2383], status: SUCCESS, issues: , uid: 520d9b87-b253fc60-6abc5692-b4addadf 2026-01-10T12:12:34.871429Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593704706726247658:2533], status: SUCCESS, compileActor: [1:7593704706726247666:2538] 2026-01-10T12:12:34.871485Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593704706726247658:2533], queryUid: 520d9b87-b253fc60-6abc5692-b4addadf, status:SUCCESS |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TLocksTest::GoodDupLock >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 7928, MsgBus: 17795 2026-01-10T12:12:10.485921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704601734483348:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:10.486014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:10.987944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:10.988035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:10.996116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:11.156111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:11.159749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:11.208233Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704601734483320:2081] 1768047130484123 != 1768047130484126 2026-01-10T12:12:11.420691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:11.432796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:11.432829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:11.432837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:11.432926Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:11.497908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:12.015260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:12.098330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:12.277871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:12.549507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:12.664805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:15.488190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704601734483348:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:15.488285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:15.838828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704623209321684:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.838979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.839348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704623209321694:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.839382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.378755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.430296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.471265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.514372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.561916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.619961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.689138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.740372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:16.854027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704627504289866:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.854119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.855530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704627504289871:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.855602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704627504289872:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.855933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.859461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:16.872943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704627504289875:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:16.968943Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704627504289926:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12747, MsgBus: 19762 2026-01-10T12:12:20.937403Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704644752678675:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:20.939309Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... 2:12:26.027443Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593704670522485217:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22108, MsgBus: 7515 2026-01-10T12:12:29.079690Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704684456749494:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:29.079799Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:29.105624Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:29.241375Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:29.262886Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:29.262986Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:29.290973Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:29.341222Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:29.412781Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:29.412804Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:29.412811Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:29.412890Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:29.976760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:29.984878Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:30.000362Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:30.089057Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:30.098442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:30.321818Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:30.393143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.905255Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704697341653215:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.905366Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.913539Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704697341653225:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.913662Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.985334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.025565Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.059468Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.139984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.182482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.260263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.326168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.388651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.487878Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701636621400:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.487980Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.488327Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701636621405:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.488382Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701636621406:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.488514Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.492245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:33.504559Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704701636621409:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:33.589075Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704701636621460:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:34.079914Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704684456749494:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:34.079996Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 5288, MsgBus: 13311 2026-01-10T12:12:02.792378Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704567704815140:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:02.799843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:03.270243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:03.279653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:03.288279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:03.364520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:03.420155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:03.558887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:03.597871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:03.597894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:03.597900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:03.597986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:03.804825Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:04.384753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:04.484951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:04.732787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:05.088277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:05.260930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:07.784815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704589179653374:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:07.784968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:07.787554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704567704815140:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:07.787695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704589179653384:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:07.787765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:07.787821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.326077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.372682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.416499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.453253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.486184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.538335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.615654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.699057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.836174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704593474621561:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.836272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.836620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704593474621566:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.836665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704593474621567:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.836780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:08.845415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:08.867482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704593474621570:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:12:08.938245Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704593474621621:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:18.103013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:12:18.103044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:19.799907Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7593704640719262477:2651], TxId: 281474976710680, task: 1. Ctx: { CheckpointId : . TraceId : 01kekx4pmm8y3yyyw01rq965w6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTU4N ... ATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18539, MsgBus: 22450 2026-01-10T12:12:28.539343Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704682383408230:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:28.543033Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:28.655616Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:28.660439Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704682383408186:2081] 1768047148536952 != 1768047148536955 2026-01-10T12:12:28.688638Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:28.717048Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:28.717153Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:28.740628Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:28.881546Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:28.928689Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:28.928723Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:28.928731Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:28.928818Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:29.523815Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:29.532731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:29.544183Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:29.554605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.623152Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.804109Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.879585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.591430Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704699563279251:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.591518Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.591892Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704699563279260:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.591939Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.670282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.713566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.755208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.789735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.829541Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.886893Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.939853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.019838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.126519Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704703858247427:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.126628Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.126996Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704703858247432:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.127048Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704703858247433:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.127084Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.130884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:33.146375Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704703858247436:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:33.217437Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704703858247487:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:33.540220Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704682383408230:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:33.540296Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 25437, MsgBus: 17257 2026-01-10T12:11:55.446564Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704538939859472:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:55.464347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:56.038931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:56.040795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:56.040886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:56.070716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:56.283026Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.329248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:56.546802Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:56.572479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:56.572508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:56.572518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:56.572596Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:57.348196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:57.356026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:11:57.431350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.671848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.881139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:57.984664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.448198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704538939859472:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:00.448276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:01.365563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704564709665097:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.365725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.375109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704564709665107:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.375262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:01.930530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.991382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.053947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.105889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.149521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.221302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.285201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.377926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.482262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704569004633279:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.482414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.484097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704569004633284:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.484151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704569004633285:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.484196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.493384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:02.507212Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704569004633288:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:02.605990Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704569004633343:3788] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:06.070849Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593704586184502903:2540], SessionActorId: [1:7593704577594568254:2540], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7593704577594568254:2540]. 2026-01-10T12:12:06.071128Z node 1 :KQP_SESSION WARN: { ... delivered_message; 2026-01-10T12:12:27.928541Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:28.029351Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:28.033817Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593704677254727264:2081] 1768047147887039 != 1768047147887042 2026-01-10T12:12:28.045155Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:28.045276Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:28.069899Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:28.126069Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:28.174568Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:28.174602Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:28.174610Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:28.174711Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:28.870413Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:28.881568Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:28.901767Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:28.909045Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:29.014514Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.346125Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:29.457440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:31.996609Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704694434598324:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:31.996737Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:31.997105Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704694434598334:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:31.997181Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.078831Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.125222Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.209733Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.258438Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.294345Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.333174Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.380906Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.445950Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:32.547872Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704698729566499:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.547975Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.548432Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704698729566504:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.548506Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704698729566505:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.548650Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:32.557621Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:32.572886Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593704698729566508:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:32.639005Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593704698729566559:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:32.896999Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593704677254727318:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:32.897096Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:35.667476Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=N2MxMDc4YjItZTBlNDNkZTMtOWE1MzgzMTQtMzU0ZjVjNw==, ActorId: [4:7593704707319501472:2533], ActorState: ExecuteState, LegacyTraceId: 01kekx564s3jg1mr52gpbtz2eh, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] Test command err: Trying to start YDB, gRPC: 32044, MsgBus: 8516 ... waiting for SysViewsRoster update finished 2026-01-10T12:12:02.599026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:02.741466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:12:02.751515Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:12:02.751887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:02.752005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:12:03.236842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:03.236972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:03.243983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047117626121 != 1768047117626124 2026-01-10T12:12:03.261922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:03.317540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:03.476510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:12:04.150488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:04.150550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:04.150617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:04.151306Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:04.179686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:04.901330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:04.983217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:04.983338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:04.983430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:04.988736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:04.988942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:04.999223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:05.104470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:12:05.217794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:05.630481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:06.044948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:08.941011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 7080, MsgBus: 26021 ... waiting for SysViewsRoster update finished 2026-01-10T12:12:19.234528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:19.258463Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:12:19.258803Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:19.258858Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:12:19.544786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:19.544906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:19.577817Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047135099947 != 1768047135099951 2026-01-10T12:12:19.582644Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:19.630695Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:19.765623Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:12:20.149362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:20.149433Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:20.149474Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:20.150026Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:20.162576Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:20.546851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:20.611020Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:910:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.611141Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:920:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.611656Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.612814Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.613062Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:20.621804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:20.730541Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:924:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:12:20.766146Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:981:2821] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:20.841423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:21.137305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:24.057223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 64749, MsgBus: 21698 2026-01-10T12:12:29.343604Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704684047737504:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:29.354055Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:29.380949Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:29.499335Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704684047737475:2081] 1768047149339069 != 1768047149339072 2026-01-10T12:12:29.515847Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:29.515962Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:29.518170Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:29.537851Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:12:29.539279Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:12:29.554285Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:29.607063Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:29.607082Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-01-10T12:12:29.607091Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:29.607160Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:29.659407Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:30.152539Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:30.162386Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:30.359329Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:33.312968Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701227607520:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.313165Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.314618Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701227607555:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.314675Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704701227607556:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.314861Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:33.319782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:33.331861Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704701227607559:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:33.435186Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704701227607612:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:33.505404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:33.606276Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:34.351164Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704684047737504:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:34.351830Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:34.541775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:35.711510Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:36.206408Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593704714112517683:2963], SessionActorId: [3:7593704709817550244:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 . sessionActorId=[3:7593704709817550244:2963]. 2026-01-10T12:12:36.206898Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YWJmNzRhYzctNzk1YmZhNTQtZjY2N2NhM2ItNGUxMGVkMWU=, ActorId: [3:7593704709817550244:2963], ActorState: ExecuteState, LegacyTraceId: 01kekx56wc8ndrwjn4k570eycy, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593704714112517684:2963] from: [3:7593704714112517683:2963] trace_id# 2026-01-10T12:12:36.207096Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593704714112517684:2963] TxId: 281474976710666. Ctx: { TraceId: 01kekx56wc8ndrwjn4k570eycy, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YWJmNzRhYzctNzk1YmZhNTQtZjY2N2NhM2ItNGUxMGVkMWU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 } trace_id# 2026-01-10T12:12:36.207380Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YWJmNzRhYzctNzk1YmZhNTQtZjY2N2NhM2ItNGUxMGVkMWU=, ActorId: [3:7593704709817550244:2963], ActorState: ExecuteState, LegacyTraceId: 01kekx56wc8ndrwjn4k570eycy, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`." issue_code: 2001 severity: 1 } trace_id# |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-10T12:12:38.363033Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.363062Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.363080Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.376375Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:38.377056Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:38.377121Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.378232Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.378254Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.378272Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.378593Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:38.379358Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:38.379411Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.380521Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.380546Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.380580Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.380945Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:12:38.381004Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.381034Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.381193Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-10T12:12:38.383319Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.383356Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.383374Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.383964Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:12:38.384007Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.384044Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.384128Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-10T12:12:38.385156Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:12:38.385178Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:12:38.385198Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.385614Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:38.387877Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:38.399772Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:12:38.400172Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:38.400581Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-10T12:12:38.405914Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-10T12:12:38.406198Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:38.406249Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:12:38.406287Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:12:38.406311Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-10T12:12:38.406339Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-10T12:12:38.406360Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-10T12:12:38.406386Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-10T12:12:38.406403Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-10T12:12:38.406435Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-10T12:12:38.406453Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-10T12:12:38.406469Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-10T12:12:38.406501Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-10T12:12:38.406523Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-10T12:12:38.406539Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-10T12:12:38.406570Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-10T12:12:38.406588Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-10T12:12:38.406647Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-10T12:12:38.406667Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-10T12:12:38.406684Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-10T12:12:38.406717Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-10T12:12:38.406736Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-10T12:12:38.406757Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-10T12:12:38.406781Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-10T12:12:38.406803Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-10T12:12:38.406824Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-10T12:12:38.406856Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-10T12:12:38.406879Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-10T12:12:38.406915Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-10T12:12:38.406936Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-10T12:12:38.406955Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-10T12:12:38.406972Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-10T12:12:38.406991Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-10T12:12:38.407076Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-10T12:12:38.407101Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-10T12:12:38.407128Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-10T12:12:38.407151Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-10T12:12:38.407175Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-10T12:12:38.407191Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-10T12:12:38.407212Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-10T12:12:38.407229Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-10T12:12:38.407244Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-10T12:12:38.407266Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-10T12:12:38.407282Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-10T12:12:38.407313Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-10T12:12:38.407330Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-10T12:12:38.407347Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-10T12:12:38.407362Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-10T12:12:38.407382Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-10T12:12:38.407397Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-10T12:12:38.407413Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-10T12:12:38.407471Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-10T12:12:38.410189Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-10T12:12:38.410520Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-10T12:12:38.410563Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-10T12:12:38.410588Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-10T12:12:38.410607Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-10T12:12:38.410635Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-10T12:12:38.410652Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-10T12:12:38.410672Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-10T12:12:38.410692Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-10T12:12:38.410727Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-10T12:12:38.410745Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-10T12:12:38.410764Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-10T12:12:38.410800Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-10T12:12:38.410823Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-10T12:12:38.410841Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-10T12:12:38.410864Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-10T12:12:38.410893Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-10T12:12:38.410940Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-10T12:12:38.410959Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-10T12:12:38.411024Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-10T12:12:38.411044Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-10T12:12:38.411063Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-10T12:12:38.411081Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-10T12:12:38.411100Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-10T12:12:38.411119Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-10T12:12:38.411150Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-10T12:12:38.411168Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-10T12:12:38.411184Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-10T12:12:38.411215Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-10T12:12:38.411236Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-10T12:12:38.411254Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-10T12:12:38.411283Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-10T12:12:38.411312Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-10T12:12:38.411406Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-10T12:12:38.411428Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-10T12:12:38.411444Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-10T12:12:38.411460Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-10T12:12:38.411475Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-10T12:12:38.411489Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-10T12:12:38.411508Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-10T12:12:38.411524Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-10T12:12:38.411556Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-10T12:12:38.411577Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-10T12:12:38.411593Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-10T12:12:38.411608Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-10T12:12:38.411625Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-10T12:12:38.411640Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-10T12:12:38.411662Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-10T12:12:38.411692Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-10T12:12:38.411712Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-10T12:12:38.411732Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-10T12:12:38.411781Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-10T12:12:38.411938Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-10T12:12:38.415678Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.415710Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.416210Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.416639Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:12:38.416985Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:38.417170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.417674Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:38.518615Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.518951Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:12:38.519019Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:38.519079Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:12:38.519148Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:12:38.721199Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-10T12:12:38.821886Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:12:38.823346Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:12:38.823554Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:12:38.825339Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.825361Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.825385Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:38.825655Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:38.826104Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:38.826312Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.826763Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:38.927789Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:38.928034Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:12:38.928102Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:38.928145Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:12:38.928388Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-10T12:12:38.928489Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:12:38.928668Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:12:38.928734Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-10T12:12:38.930024Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |87.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::SimpleDataHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 17300, MsgBus: 7555 2026-01-10T12:07:00.635946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703273590428900:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:00.644061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:01.374466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:01.374547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:01.460540Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703273590428649:2081] 1768046820521908 != 1768046820521911 2026-01-10T12:07:01.460630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:01.510212Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.516638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:01.636207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:01.904388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:01.924685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:01.924704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:01.924710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:01.924790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:03.257577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:03.269624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2026-01-10T12:07:05.640250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703273590428900:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:05.640330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:06.903047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2026-01-10T12:07:07.323933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703303655200720:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:07.324112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:07.324612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703303655200732:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:07.324665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703303655200733:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:07.324699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:07:07.333639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:07:07.356221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593703303655200736:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:07:07.564028Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593703303655200787:2606] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:07:08.215118Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-10T12:07:08.220463Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-10T12:07:08.220753Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7593703307950168136:2336] TxId: 281474976710663. Ctx: { TraceId: 01kekwv5ssf7jhb94rxtgj8eqj, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjczZWE0N2UtMmVhNTFmNWUtNmJjNTQ5MzctODg0MjdlYmE=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-10T12:07:08.236969Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZjczZWE0N2UtMmVhNTFmNWUtNmJjNTQ5MzctODg0MjdlYmE=, ActorId: [1:7593703303655200717:2336], ActorState: ExecuteState, LegacyTraceId: 01kekwv5ssf7jhb94rxtgj8eqj, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)\n" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-10T12:07:08.291014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2026-01-10T12:07:08.950251Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-10T12:07:08.957183Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-10T12:07:08.957440Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7593703307950168271:2373] TxId: 281474976710668. Ctx: { TraceId: 01kekwv6xe3mthte32hrcr98xy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODBjNWVlYzQtOTAxNTc4ZmItNzU1YTIwMGItMjAzNTUyYmU=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-10T12:07:08.957756Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODBjNWVlYzQtOTAxNTc4ZmItNzU1YTIwMGItMjAzNTUyYmU=, ActorId: [1:7593703307950168229:2373], ActorState: ExecuteState, LegacyTraceId: 01kekwv6xe3mthte32hrcr98xy, Create QueryResponse for error on request, ... :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7593704592801610772:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:13.409343Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:16.673875Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704627161350017:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.673990Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704627161349999:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.674236Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.675122Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593704627161350028:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.675227Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:16.681793Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:16.700852Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7593704627161350026:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:16.803781Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7593704627161350079:2547] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18096, MsgBus: 21232 2026-01-10T12:12:18.381450Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7593704638294291764:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:18.381525Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:18.714722Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:18.714856Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:18.719604Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:18.721477Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:18.728221Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7593704638294291654:2081] 1768047138373146 != 1768047138373149 2026-01-10T12:12:18.758144Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:18.922678Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:19.008404Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:19.008439Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:19.008451Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:19.008599Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:19.425190Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:20.102134Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:23.381041Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593704638294291764:2146];send_to=[0:7307199536658146131:7762515]; 
2026-01-10T12:12:23.381162Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:25.571047Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593704668359063623:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.571206Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.571705Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593704668359063636:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.571711Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593704668359063635:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.571778Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:25.578906Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:25.603354Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593704668359063639:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:12:25.662151Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593704668359063690:2542] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:25.729724Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:25.833776Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:31.124789Z node 11 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 11, TabletId: 72075186224037888 not found 2026-01-10T12:12:31.176516Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:31.652931Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [11:7593704694128867991:2444], TxId: 281474976715671, task: 1. Ctx: { CheckpointId : . TraceId : 01kekx5256egtz3vgbd8paaqg5. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=ZTNkZThmMy00MzM1YzU5Zi1hMThjMjVhYy05ZDM0Yjc1MA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2026-01-10T12:12:31.655548Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7593704694128867992:2445], TxId: 281474976715671, task: 2. Ctx: { CheckpointId : . TraceId : 01kekx5256egtz3vgbd8paaqg5. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=ZTNkZThmMy00MzM1YzU5Zi1hMThjMjVhYy05ZDM0Yjc1MA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7593704694128867988:2439], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:12:31.656217Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=11&id=ZTNkZThmMy00MzM1YzU5Zi1hMThjMjVhYy05ZDM0Yjc1MA==, ActorId: [11:7593704694128867978:2439], ActorState: ExecuteState, LegacyTraceId: 01kekx5256egtz3vgbd8paaqg5, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: \"pg_proc\"\n\n" severity: 1 } trace_id# >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] Test command err: 2026-01-10T12:10:20.098620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:20.098676Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:20.345124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:21.631034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:10:21.935422Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:21.944793Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:21.945513Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3565878612551250117 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:22.057428Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:22.058005Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:22.058249Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 675261336993531361 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:22.129611Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:22.130185Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:22.130419Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5369647185691994001 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:22.198826Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:22.199365Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:22.199619Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1910622055874954361 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:22.247692Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:22.256429Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:22.256702Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp4KTYJ4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9001080390056240078 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveMo ... 
ata# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:12:31.274095Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:12:31.275335Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:12:31.275601Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 840950869502949411 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:12:31.313080Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:12:31.313778Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:12:31.314072Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8400830112244272672 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:12:31.362902Z node 150 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:12:31.363533Z node 150 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:12:31.363887Z node 150 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmpXs73pH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13887244972586500039 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:12:31.810150Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:31.810276Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:31.915560Z node 145 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2026-01-10T12:12:35.942630Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:35.942744Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:36.081998Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:37.106981Z node 160 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:12:37.107821Z node 160 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp7iRCo1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:12:37.108256Z node 160 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp7iRCo1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a0c/r3tmp/tmp7iRCo1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10895491585832225411 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TBackupCollectionTests::DropCollectionWithIncrementalBackup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TBackupCollectionTests::DropLargeBackupCollection [GOOD] >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-10T12:12:40.186467Z :DEBUG: 
[db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.186517Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.186549Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.187020Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:12:40.187066Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.187127Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.193540Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005429s 2026-01-10T12:12:40.194130Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.194623Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:40.194705Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.195781Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.195812Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.195831Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.196194Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:12:40.196230Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.196276Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.196407Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007711s 2026-01-10T12:12:40.196953Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.198625Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:40.198735Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.199682Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.199737Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.199765Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.200249Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:12:40.200291Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.200323Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.200408Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.280024s 2026-01-10T12:12:40.200861Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.201087Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:40.201137Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.201849Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.201868Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.201934Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.202417Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-10T12:12:40.202455Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.202479Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.202561Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.229222s 2026-01-10T12:12:40.202955Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.203431Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-10T12:12:40.203532Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.204547Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.204570Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.204603Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.204921Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.205337Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:40.217573Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.217921Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-10T12:12:40.217955Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.217982Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.218044Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.269605s 2026-01-10T12:12:40.218579Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:12:40.219940Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.219972Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.220009Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.220529Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.220979Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:40.221174Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.222048Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:40.222910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:40.223347Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:40.223558Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-10T12:12:40.223659Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:40.223753Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:40.223788Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-10T12:12:40.223831Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:12:40.223856Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:12:40.227299Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.227345Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.227368Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.227679Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.228109Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:40.228274Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.228948Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:40.329650Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.330013Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:12:40.330130Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:40.330211Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:12:40.330288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:12:40.431381Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:12:40.433147Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-10T12:12:40.434434Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.434457Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.434477Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:40.434932Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:40.435380Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:40.435612Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.436223Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:40.545241Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:40.546353Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:12:40.546449Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:40.546493Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-10T12:12:40.546593Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-10T12:12:40.546697Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-10T12:12:40.546932Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-10T12:12:40.547031Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-10T12:12:40.547164Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |87.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> LocalTableWriter::WaitTxIds >> TBackupCollectionTests::DropCollectionDuringActiveBackup [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup >> LocalTableWriter::ConsistentWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-10T12:12:42.671876Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.671909Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.671934Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.680414Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.684399Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.697950Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.699186Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.700708Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.700736Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.700758Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.704495Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:12:42.708533Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.708830Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.712280Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.712743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:12:42.717230Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.717264Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.717289Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.726322Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.736405Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.736622Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.737025Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.738831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.742232Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:42.742415Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.742482Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:12:42.743886Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.744203Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.744236Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.744631Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.750276Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.750516Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.750899Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.751439Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.752172Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:42.752306Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.752356Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2026-01-10T12:12:42.753225Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.753272Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.753305Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.760476Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.772460Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.772701Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.776320Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.776986Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.777487Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:42.777591Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.777646Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:12:42.778916Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.778948Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.778970Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.788450Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.789217Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.789376Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.792999Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.793746Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.796286Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:42.796481Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.796538Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:12:42.797767Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.797801Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.797824Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.799499Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:12:42.813656Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.813868Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.815551Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.817363Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.817846Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:42.817941Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.817994Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:12:42.825319Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.825353Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.825379Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:42.830129Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:42.837353Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:42.837583Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:42.837985Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:42.839346Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:42.840339Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:42.840731Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-10T12:12:42.840908Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:12:42.844718Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:42.844776Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:12:42.844809Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-10T12:12:42.844830Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-10T12:12:42.844885Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-01-10T12:12:42.844915Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-10T12:12:42.845143Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2026-01-10T12:12:42.845351Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } |87.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |87.8%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28389, MsgBus: 24583 2026-01-10T12:11:50.419888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704515686502841:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:50.419953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:50.783738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:50.783852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:50.825963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:50.864752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:50.925204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:50.926114Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704515686502604:2081] 1768047110373666 != 1768047110373669 2026-01-10T12:11:51.048809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:51.048859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:51.048871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:51.048986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:51.144173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:51.420253Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:51.662142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:51.669338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:54.270446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704532866372662:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.270537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704532866372673:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.270659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.272212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704532866372690:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.272292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:54.280214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:54.324336Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704532866372689:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:11:54.461515Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704532866372742:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:54.993335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:11:55.235239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:55.235486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:55.235508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:55.235512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:55.235757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:55.235758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:55.235866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:55.235883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:55.236034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:55.236157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:55.236239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:55.236380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:55.236487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:55.236586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:55.236684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:11:55.236783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:11:55.236894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593704537161340210:2340];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:11:55.237638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:55.237793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:55.237884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:55.238006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593704537161340208:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:55.238095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... 
TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.148009Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.148236Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.148269Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.151526Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.151604Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.151625Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.157309Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.157371Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.157389Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.174087Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.174174Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.174196Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:12:29.216552Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx4jzc7zbrhrw2n8wewr86", SessionId: ydb://session/3?node_id=2&id=MzIwYzgwNTctMWRhYThiOGUtYWI3ZGM3NTAtZjMyZWY5Mzg=, Slow query, duration: 10.379643s, status: 
STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b Trying to start YDB, gRPC: 3294, MsgBus: 23858 2026-01-10T12:12:31.827817Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704694687071939:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:31.827865Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:31.854373Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:31.947087Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:31.973966Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:31.974121Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:31.998708Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:32.103671Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:32.156880Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:32.156919Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:32.156929Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:32.157034Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:32.722118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:32.732536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-10T12:12:32.843748Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:36.210926Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704716161909169:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:36.211157Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:36.211615Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704716161909205:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:36.211661Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593704716161909206:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:36.211703Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:36.216589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:36.229591Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593704716161909209:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:12:36.317694Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593704716161909260:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:36.411932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:36.569039Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:37.404515Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593704694687071939:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.404871Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:37.808032Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> LocalTableWriter::DataAlongWithHeartbeat |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |87.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-10T12:12:41.147779Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.147811Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.147867Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:41.148668Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:12:41.149252Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:41.162640Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.163645Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-10T12:12:41.165182Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:41.165412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:12:41.165785Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:12:41.166018Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:12:41.166151Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:41.166204Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:12:41.166244Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:12:41.166444Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.166485Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:12:41.166533Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:12:41.166555Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:12:41.166677Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.166782Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-10T12:12:41.166805Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2026-01-10T12:12:41.166825Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:12:41.166909Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.166937Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-10T12:12:41.166966Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-10T12:12:41.166997Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:12:41.167095Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2026-01-10T12:12:41.172937Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.172990Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.173017Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:41.173566Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:41.174235Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:41.174470Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.178360Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-10T12:12:41.179418Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:12:41.179668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:12:41.180046Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:12:41.180316Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:12:41.180448Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:41.180498Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:12:41.180532Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:12:41.180733Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.180773Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:12:41.180799Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:12:41.180824Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:12:41.180987Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.181068Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-10T12:12:41.181089Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-10T12:12:41.181108Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:12:41.181194Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-10T12:12:41.181224Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-10T12:12:41.181245Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-10T12:12:41.181266Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00 ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:12:43.806981Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2026-01-10T12:12:43.894755Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:12:43.894847Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:12:43.894934Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:43.907736Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:43.924382Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:43.924806Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-10T12:12:43.925377Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-10T12:12:44.031126Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-10T12:12:44.032638Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:44.034815Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:12:44.042010Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:12:44.043094Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-10T12:12:44.055145Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-10T12:12:44.056001Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-10T12:12:44.056960Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-10T12:12:44.057832Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-10T12:12:44.067085Z :DEBUG: Take Data. Partition 1. 
Read: {1, 2} (9-9) 2026-01-10T12:12:44.068058Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-10T12:12:44.068291Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-10T12:12:44.068551Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:12:44.073060Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2026-01-10T12:12:44.100322Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.101574Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.101647Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:44.102167Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:44.105002Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:44.105243Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.107461Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:44.108195Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-10T12:12:44.109738Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.109794Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.109853Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:44.110383Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:44.110959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:44.111131Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.111852Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:44.124738Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:44.124978Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:44.125057Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:12:44.125344Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> KqpSinkLocks::DifferentKeyUpdate [GOOD] |87.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> LocalTableWriter::DecimalKeys >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |87.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> LocalTableWriter::WriteTable >> LocalTableWriter::ApplyInCorrectOrder >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:51.425029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:51.550222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:51.561091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:51.561705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:51.561839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:52.062022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:52.062197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:52.069936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046927630175 != 1768046927630178 2026-01-10T12:08:52.086019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:52.138379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:52.231181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:52.715847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.715937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.716012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.716144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-10T12:08:52.716191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:08:52.864989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:08:52.865317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.865605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:08:52.865680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:08:52.865946Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:08:52.866091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:52.866225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.867155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:08:52.867359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:08:52.867419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.867460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.867686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.867770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.867868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.867931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:08:52.867977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:08:52.868019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:08:52.868329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.868992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.869048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.869214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.869259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.869328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.869383Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:08:52.869428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:08:52.869501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.869898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.869933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-10T12:08:52.870095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.870132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:08:52.870186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.870228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:08:52.870276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:08:52.870310Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-10T12:08:52.870348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:08:52.874413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:08:52.874965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:08:52.875022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:08:52.875174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:08:52.875491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-10T12:08:52.875976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.876032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:08:52.876090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:08:52.876274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-10T12:08:52.876324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:08:52.876405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 7205759404664 ... 337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.650238Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.650263Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.660849Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:12:42.660938Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:12:42.660976Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-01-10T12:12:42.661070Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2026-01-10T12:12:42.661112Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-10T12:12:42.661245Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 60 shard idx 72057594046644480:11 data size 271 row count 13 2026-01-10T12:12:42.661316Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 60], pathId map=SequenceTable, is column=0, is olap=0, RowCount 13, DataSize 271, with borrowed parts 2026-01-10T12:12:42.661351Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2026-01-10T12:12:42.661459Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2026-01-10T12:12:42.661516Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 68 shard idx 72057594046644480:17 data size 368 row count 16 2026-01-10T12:12:42.661564Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 68], pathId map=SequenceTable, is column=0, is olap=0, RowCount 16, DataSize 368, with borrowed parts 2026-01-10T12:12:42.661595Z node 11 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2026-01-10T12:12:42.661655Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2026-01-10T12:12:42.661730Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:12:42.672382Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:12:42.672481Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:12:42.672518Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:12:42.683011Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.683091Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.683128Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.683167Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.683201Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.707096Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.707176Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.707213Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.707254Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.707287Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.717839Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.717922Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.717959Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.717997Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.718028Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.728679Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.728756Z node 11 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.728790Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.728821Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.728847Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.749931Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.750011Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.750042Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.750078Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.750112Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.760653Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.760731Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.760764Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.760801Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.760832Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.771342Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.771433Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.771471Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.771508Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.771537Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.794790Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.794877Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.794909Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.794946Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-10T12:12:42.794978Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.806130Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.806201Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.806246Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.806283Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.806313Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:12:42.819665Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:12:42.819748Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.819784Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:12:42.819822Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:12:42.819854Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-10T12:12:43.189296Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037907, clientId# [11:3251:4466], serverId# [11:3252:4467], sessionId# [0:0:0] { items { uint32_value: 6 } items { uint32_value: 600 } }, { items { uint32_value: 7 } items { uint32_value: 70 } }, { items { uint32_value: 8 } items { uint32_value: 80 } }, { items { uint32_value: 9 } items { uint32_value: 90 } } |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 11013, MsgBus: 62098 ... waiting for SysViewsRoster update finished 2026-01-10T12:11:55.800838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:56.073714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:56.117164Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:56.117667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:56.117784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:56.663162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:56.663295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:56.679421Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047111776415 != 1768047111776418 2026-01-10T12:11:56.701200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.769459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:56.939139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:57.364925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:57.365043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:57.365133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:57.365834Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:57.378434Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:57.884950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:58.014566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.291986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:58.943830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:59.306549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.268502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1909:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.269058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.270311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.270447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.308452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.512577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:00.825042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.114560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.376200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.681322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.980938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.347877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.896301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.896440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.896954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.897038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.897125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:02.904679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:03.066506Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2805:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:12:03.131710Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2865:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:07.038496Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MWYwOTQ1NzctMjE2ODVlZWMtODJjYmM5N2MtNTVkN2FhYzk=, ActorId: [1:3149:4457], ActorState: ExecuteState, LegacyTraceId: 01kekx49b588wvx53cyazy5ncv, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken ... 10T12:12:34.225320Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037946;self_id=[3:7593704685170016060:2341];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037946; 2026-01-10T12:12:34.226652Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037924;self_id=[3:7593704685170016270:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037924; 2026-01-10T12:12:34.231722Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037900;self_id=[3:7593704685170016256:2377];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037900; 2026-01-10T12:12:34.237360Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037927;self_id=[3:7593704685170016354:2397];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037927; 2026-01-10T12:12:34.237367Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037951;self_id=[3:7593704685170015932:2334];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037951; 2026-01-10T12:12:34.253248Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037914;self_id=[3:7593704685170016251:2372];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037914; 2026-01-10T12:12:34.253324Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037929;self_id=[3:7593704685170016223:2362];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037929; 2026-01-10T12:12:34.253369Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037901;self_id=[3:7593704685170016336:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037901; 2026-01-10T12:12:34.260389Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037933;self_id=[3:7593704685170016217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037933; 2026-01-10T12:12:34.262167Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037904;self_id=[3:7593704685170016342:2392];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037904; 2026-01-10T12:12:34.262234Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037932;self_id=[3:7593704685170016271:2385];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037932; 2026-01-10T12:12:34.270661Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037912;self_id=[3:7593704685170016269:2383];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037912; 2026-01-10T12:12:34.270747Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037903;self_id=[3:7593704685170015931:2333];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037903; 2026-01-10T12:12:34.270783Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037917;self_id=[3:7593704685170016116:2343];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037917; 2026-01-10T12:12:34.270878Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037947;self_id=[3:7593704685170016107:2342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037947; 2026-01-10T12:12:34.276779Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037895;self_id=[3:7593704685170015940:2336];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037895; 2026-01-10T12:12:34.277182Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037925;self_id=[3:7593704685170016335:2387];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037925; 2026-01-10T12:12:34.278750Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037913;self_id=[3:7593704685170016250:2371];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037913; Trying to start YDB, gRPC: 20294, MsgBus: 5617 2026-01-10T12:12:35.479086Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704711080351056:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:35.479135Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:35.521345Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:35.581603Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:35.582919Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593704711080351028:2081] 1768047155477579 != 1768047155477582 2026-01-10T12:12:35.621001Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:35.621104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:35.632678Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-10T12:12:35.713587Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:35.713618Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:35.713626Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:35.713719Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:35.774542Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:36.447287Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:36.453085Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:36.491123Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:39.828850Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704728260221092:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:39.828908Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704728260221108:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:39.828956Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:39.830453Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593704728260221112:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:39.830572Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:39.834733Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:39.850589Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593704728260221111:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:39.956180Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593704728260221164:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:40.040760Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:40.099382Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:40.969093Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593704711080351056:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:40.969200Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:41.356400Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert >> LocalTableWriter::WaitTxIds [GOOD] |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> LocalTableWriter::SupportedTypes >> LocalTableWriter::ConsistentWrite [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |87.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2026-01-10T12:12:43.420357Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704744516682370:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.420542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:43.736151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:43.744793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:43.744896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:43.882355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:43.907523Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704744516682148:2081] 1768047163393922 != 1768047163393925 2026-01-10T12:12:43.913519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:43.936306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:44.236870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:44.236901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:44.236908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:44.237002Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:44.422213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:44.527257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:44.536036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:44.540288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:44.684058Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handshake: worker# [1:7593704748811650369:2551] 
2026-01-10T12:12:44.684592Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:44.684960Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:44.685047Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Send handshake: worker# [1:7593704748811650369:2551] 2026-01-10T12:12:44.685659Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:44.698278Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-10T12:12:44.698499Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-10T12:12:44.698690Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704748811650372:2550] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:44.698733Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:44.698878Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704748811650372:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:44.702405Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704748811650372:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:44.702497Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:44.702542Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-10T12:12:45.686561Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-10T12:12:45.686739Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2026-01-10T12:12:45.686855Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704748811650372:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-10T12:12:45.689427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704748811650372:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:45.689540Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:45.689590Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704748811650368:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2026-01-10T12:12:43.626748Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704744118397441:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.632698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:44.064408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:44.119464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:44.119578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:44.200780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:44.251676Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:44.281866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:44.552648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:44.552674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:44.552681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:44.552789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:44.656951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:44.898059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:44.905018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:44.910395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:45.079808Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handshake: worker# [1:7593704748413365531:2491] 2026-01-10T12:12:45.080116Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { 
DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:45.080401Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:45.080456Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Send handshake: worker# [1:7593704748413365531:2491] 2026-01-10T12:12:45.081079Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:45.086551Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-10T12:12:45.086762Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2026-01-10T12:12:45.087042Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:45.087085Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:45.087197Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 
Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:45.092574Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:45.092648Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:45.092702Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2026-01-10T12:12:45.096290Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:45.100325Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:45.112297Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2026-01-10T12:12:45.112475Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2026-01-10T12:12:45.112643Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: 
CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-10T12:12:45.132242Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:45.132326Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:45.132371Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2026-01-10T12:12:45.140695Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:45.140945Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2026-01-10T12:12:45.141068Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-10T12:12:45.158969Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704752708332922:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:45.159046Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:45.159098Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2026-01-10T12:12:45.159695Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704752708332919:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } |87.9%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> LocalTableWriter::StringEscaping >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2026-01-10T12:12:44.910175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704750100570865:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:44.910803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:45.326851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:45.349865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:45.349983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:45.396994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:45.525791Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:45.569176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:45.820063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:45.820166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:45.820178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:45.820281Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:45.936727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:46.293405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:46.302029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:46.306205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:46.460656Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handshake: worker# [1:7593704758690506340:2553] 2026-01-10T12:12:46.461013Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:46.461334Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:46.461370Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Send handshake: worker# [1:7593704758690506340:2553] 2026-01-10T12:12:46.461980Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:46.467430Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle 
NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-10T12:12:46.467590Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-10T12:12:46.467769Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704758690506343:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:46.467826Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:46.467933Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704758690506343:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:46.515823Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704758690506343:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:46.515901Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:46.515982Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704758690506339:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2026-01-10T12:12:45.495805Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704753476497996:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:45.498660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:45.923224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:45.933986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:45.934153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:46.038423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:46.116224Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704753476497945:2081] 1768047165456696 != 1768047165456699 2026-01-10T12:12:46.140652Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:46.156182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:46.496831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:46.496864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:46.496873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:46.496954Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:46.501108Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:46.803401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:46.811759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:46.816404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:47.001380Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handshake: worker# [1:7593704757771466075:2490] 2026-01-10T12:12:47.001832Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:47.002216Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 
ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:47.002265Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Send handshake: worker# [1:7593704757771466075:2490] 2026-01-10T12:12:47.005635Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:47.005878Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2026-01-10T12:12:47.006125Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704762066433465:2551] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:47.006199Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:47.006290Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704762066433465:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2026-01-10T12:12:47.013133Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704762066433465:2551] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:47.013240Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:47.013313Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: 
[LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704757771466166:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> LocalTableWriter::WriteTable [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest >> TBackupCollectionTests::DropCollectionValidationCases [GOOD] >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 >> LocalTableWriter::SupportedTypes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2026-01-10T12:12:47.121455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704761023365016:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:47.131086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:47.180490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:47.525184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:47.558729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:47.558814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:47.561148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:47.700871Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:47.734018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:48.139891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:48.203889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:48.203920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:48.203930Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:48.204062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:48.562016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:48.575782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.718744Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handshake: worker# [1:7593704765318333038:2494] 2026-01-10T12:12:48.718996Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:48.719164Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:48.719194Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Send handshake: worker# [1:7593704765318333038:2494] 2026-01-10T12:12:48.719468Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:48.724021Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-10T12:12:48.733774Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-10T12:12:48.734011Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765318333134:2555] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:48.734051Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:48.734127Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765318333134:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:48.741320Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765318333134:2555] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:48.741427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:48.741481Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-10T12:12:48.745448Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:48.748463Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-10T12:12:48.748589Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 
2026-01-10T12:12:48.748701Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765318333134:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:48.756508Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765318333134:2555] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:48.756571Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:48.756614Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765318333131:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TLocksTest::NoLocksSet >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2026-01-10T12:12:46.632171Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704757048446999:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:46.632234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:46.702678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:47.135279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:47.194595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:47.194732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:47.206363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:47.376428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:47.376620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704757048446891:2081] 1768047166592619 != 1768047166592622 2026-01-10T12:12:47.386949Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:47.660196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:47.916952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:47.917002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:47.917012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:47.917115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:48.360490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:48.371730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.492205Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handshake: worker# [1:7593704765638382314:2489] 2026-01-10T12:12:48.492588Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:48.492858Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:48.492919Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Send handshake: worker# [1:7593704765638382314:2489] 2026-01-10T12:12:48.493705Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 35b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 23b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:48.493935Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 35 },{ Order: 3 BodySize: 23 }] } 2026-01-10T12:12:48.494108Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765638382408:2550] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:48.494148Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:48.494246Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765638382408:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 35b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 23b }] } 2026-01-10T12:12:48.500803Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704765638382408:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:48.500880Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:48.500936Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704765638382405:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TFlatTest::SelectRangeItemsLimit >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2026-01-10T12:12:48.251621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704766881525139:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:48.260459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:48.619727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:48.675908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:48.676034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:48.716518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:48.755597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:48.759967Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704766881524925:2081] 1768047168231807 != 1768047168231810 2026-01-10T12:12:48.838368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:49.136173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:49.136201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:49.136208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:49.136297Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:49.224314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:49.508998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:49.522803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:49.747273Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handshake: worker# [1:7593704771176493059:2490] 2026-01-10T12:12:49.747675Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:49.748093Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:49.748135Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Send handshake: worker# [1:7593704771176493059:2490] 2026-01-10T12:12:49.752695Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 
CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:49.753846Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 
6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2026-01-10T12:12:49.754251Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704771176493153:2550] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:49.754331Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:49.754773Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704771176493153:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 
Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2026-01-10T12:12:49.798229Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704771176493153:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:49.798392Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:49.798482Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704771176493149:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TFlatTest::Ls |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TFlatTest::SelectRangeForbidNullArgs2 |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> LocalTableWriter::StringEscaping [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> TBackupCollectionTests::DropErrorRecoveryTest [GOOD] >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation >> TLocksFatTest::PointSetBreak >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> TObjectStorageListingTest::Listing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] Test command err: 2026-01-10T12:12:50.680323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704774252783483:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:50.680466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:51.032737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:51.057978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:51.058107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:51.150755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:51.210757Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704774252783455:2081] 1768047170675546 != 1768047170675549 2026-01-10T12:12:51.225500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:51.241867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:51.628912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:51.628947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:51.628954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:51.629035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:51.696118Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:52.016010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:52.028032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:52.045366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:52.192012Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handshake: worker# [1:7593704782842718882:2490] 2026-01-10T12:12:52.192331Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:12:52.192568Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:12:52.192606Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Send handshake: worker# [1:7593704782842718882:2490] 2026-01-10T12:12:52.192914Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-10T12:12:52.193039Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-10T12:12:52.193169Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704782842718979:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:12:52.193200Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:52.193270Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704782842718979:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-10T12:12:52.195906Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7593704782842718979:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-10T12:12:52.195962Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:12:52.196038Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7593704782842718976:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TCancelTx::ImmediateReadOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck |87.9%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TFlatTest::SelectRangeReverseItemsLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |87.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> TLocksFatTest::RangeSetNotBreak [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2026-01-10T12:12:37.347250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704718342877182:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.347297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.387450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:37.724194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:37.773544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:37.773670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:37.873931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:37.896792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:37.921436Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704718342877156:2081] 1768047157345933 != 1768047157345936 2026-01-10T12:12:37.980157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:38.241750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:38.268273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 
72057594046644480 2026-01-10T12:12:38.351164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:38.361273Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:38.795762Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845413:2585] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:38.795829Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845413:2585] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:38.804258Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845426:2595] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:38.804320Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845426:2595] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:38.817307Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845439:2605] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:38.817395Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845439:2605] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:38.851485Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845465:2625] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:38.851558Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845465:2625] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:38.862981Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845478:2635] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:38.863037Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845478:2635] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:38.875042Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593704722637845491:2645] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:38.875110Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593704722637845491:2645] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:41.621721Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704736311409915:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:41.621772Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:41.644617Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:41.813573Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:41.816061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:41.816164Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:41.821468Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:41.835509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.018805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:42.382788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:42.400769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:42.415380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:42.632241Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:46.642435Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704756609746874:2132];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:46.642508Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:46.786191Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:46.909448Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:46.911060Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704756609746782:2081] 1768047166599407 != 1768047166599410 2026-01-10T12:12:46.924722Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:46.924824Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:46.962684Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:46.996593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.249560Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:47.257273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:47.268587Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:47.704258Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:47.855548Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715042:2587] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:47.855647Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593704760904715042:2587] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:47.880940Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715058:2600] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:47.881030Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593704760904715058:2600] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:47.903563Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715072:2611] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-10T12:12:47.903671Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593704760904715072:2611] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:47.945640Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715100:2633] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:47.945737Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593704760904715100:2633] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:47.969749Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715114:2644] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:47.969845Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# 
[3:7593704760904715114:2644] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:47.989480Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7593704760904715127:2654] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-10T12:12:47.989582Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593704760904715127:2654] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:51.871672Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704779842735397:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:51.908807Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:51.971616Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:52.181082Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:52.184240Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:52.196510Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:52.196598Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:52.233541Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:52.592700Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:52.685963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:52.701598Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:52.715138Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:52.912360Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:53.127448Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 
status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-10T12:12:53.127674Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7593704788432670904:2586] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-10T12:12:53.148620Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-10T12:12:53.149085Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7593704788432670918:2594] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c >> TBackupCollectionTests::DropCollectionDuringActiveOperation [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 |88.0%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2026-01-10T12:12:37.077372Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704720742934305:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.077421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.354898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:37.361089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:37.367759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:37.478543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:37.498040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:37.517602Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704720742934285:2081] 1768047157076412 != 1768047157076415 2026-01-10T12:12:37.612739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:37.800041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:37.808586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:37.899216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:38.086181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:38.119516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:38.147329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:42.099592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704720742934305:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:42.099831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:47.279262Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704761044933791:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:47.279314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:47.390196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.509363Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:47.515018Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704761044933736:2081] 1768047167264063 != 1768047167264066 2026-01-10T12:12:47.631709Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:47.631818Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:47.646171Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:47.683915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.862842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:47.869149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:47.893478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:12:47.905321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.042158Z 
node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.127575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.291182Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:52.280501Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704761044933791:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:52.280590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |88.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> ColumnShardTiers::DSConfigsStub [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> ColumnShardTiers::DSConfigs [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> TBackupCollectionTests::ConcurrentDropProtectionTest [GOOD] >> TBackupCollectionTests::BackupServiceDirectoryValidation |88.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 28762, MsgBus: 24096 2026-01-10T12:11:50.044550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704516620950227:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:50.044693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:50.095528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:50.478418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:50.482820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:50.482919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:50.485134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:50.508487Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704516620950042:2081] 1768047110012452 != 1768047110012455 2026-01-10T12:11:50.511950Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:50.649768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:50.687432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:50.687460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:50.687473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:50.687570Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:51.048330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:51.286776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:51.307354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:11:53.757827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704529505852808:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.757956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.758228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704529505852829:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.763495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:53.763785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704529505852831:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.763880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:53.779941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:11:53.780261Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704529505852832:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:11:53.948723Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704529505852884:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:54.630448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:11:55.097392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:55.097715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:55.097996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:55.098130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:55.098274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:55.098414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:55.098515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:55.098632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:55.098727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:55.098840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:55.098935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:11:55.099058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:11:55.099188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7593704533800820328:2343];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:11:55.108297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:55.108374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:55.108631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:55.108746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:55.108868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:55.108968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:55.109090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704533800820410:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;ev ... 
704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043598Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043614Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043631Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043648Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043664Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043679Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043695Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043711Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043726Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043742Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.043760Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046291Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046330Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046347Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046363Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046377Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046393Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], 
StateRollback: unknown message 278003713 2026-01-10T12:12:59.046410Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046929Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId:
[4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046942Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046958Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046970Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046983Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.046997Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 2026-01-10T12:12:59.047010Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704811527436490:2969], SessionActorId: [4:7593704811527436232:2969], StateRollback: unknown message 278003713 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |88.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |88.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TFlatTest::LsPathId [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> KqpRm::NodesMembershipByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:11:11.046673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:11.165117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:11.174030Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:11.174382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:11.174457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:11.541057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:11.541227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:11.547887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047068406824 != 1768047068406827 2026-01-10T12:11:11.561585Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:11.613430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:11.712760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:12.257219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:12.257983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:12.258035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:12.258076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:12.258665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:12.274846Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:12.423404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-10T12:11:12.562669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:12.563042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:12.563286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:12.563391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:12.563487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:12.563571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:12.563676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:12.563787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:12.563916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:12.564023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:12.564140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:11:12.564265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:11:12.564406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:11:12.584335Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-01-10T12:11:12.613654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:12.613764Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:12.613992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:12.614101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:12.614196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:12.614312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:12.614425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:12.614531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:12.614624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:12.614719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:12.614830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:11:12.614945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:11:12.615055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:11:12.618628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:11:12.618708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:11:12.618808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:11:12.618847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:11:12.619037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:11:12.619088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:48.956013Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:12:48.956049Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2026-01-10T12:12:48.956946Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:12:48.956994Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2026-01-10T12:12:48.957026Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2026-01-10T12:12:48.957073Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037888 2026-01-10T12:12:48.957133Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:48.957179Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:12:48.957211Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2026-01-10T12:12:48.957246Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:12:48.957272Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2026-01-10T12:12:48.957298Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2026-01-10T12:12:48.957331Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037889 2026-01-10T12:12:48.957392Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:48.957429Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:12:48.957461Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2026-01-10T12:12:48.957490Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:12:48.957515Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2026-01-10T12:12:48.957539Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2026-01-10T12:12:48.957572Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037890 2026-01-10T12:12:48.957609Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:48.957965Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:12:48.958072Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:12:48.958145Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2026-01-10T12:13:00.841475Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-10T12:13:00.841733Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-10T12:13:00.841782Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-10T12:13:00.841823Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-10T12:13:00.842376Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:13:00.842458Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2026-01-10T12:13:00.842523Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.842569Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 
2026-01-10T12:13:00.842668Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:00.842726Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:13:00.842759Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2026-01-10T12:13:00.842791Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.842818Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.842888Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:00.842938Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:13:00.842971Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2026-01-10T12:13:00.843008Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.843036Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.843077Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:00.843259Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:13:00.843296Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-10T12:13:00.843330Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.843361Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:00.843404Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:00.843783Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-10T12:13:00.843987Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 
2026-01-10T12:13:00.844069Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 2026-01-10T12:13:02.162209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:13:02.162290Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |88.0%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:10:59.868046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:59.993880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:00.005634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:00.006170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:00.006296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:00.446802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:00.446985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:00.453935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047056934100 != 1768047056934103 2026-01-10T12:11:00.467185Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:00.517444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:00.611098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:01.121487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:01.122168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:01.122217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:01.122257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:01.122777Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:01.137610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:13.105078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.105435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.105912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:980:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.105987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.109534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:13.317650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1072:2899], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.317767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.318219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1076:2903], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.318296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.318360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1079:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:13.322909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:13.446847Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1081:2908], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:13.764052Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1173:2973] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:14.326466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:14.814575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:15.458136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:16.161186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:16.696067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:17.881405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.192707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-10T12:11:22.430147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:22.430229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = 
`secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:33.526926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", ... ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:12:49.837598Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:49.838208Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:12:49.838252Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-10T12:12:49.838284Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-01-10T12:12:49.838317Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-01-10T12:12:49.838376Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-01-10T12:12:49.838408Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:12:49.838455Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:12:49.838916Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3109:4469];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:12:49.839033Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3111:4471];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:12:49.839212Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3119:4477];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-10T12:13:01.232757Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.232834Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.232864Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.233014Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.233279Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.233337Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-10T12:13:01.233383Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233419Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233476Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.233528Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.233552Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-10T12:13:01.233586Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233626Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233683Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.233727Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.233767Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-10T12:13:01.233805Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233829Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233861Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.233930Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.233949Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:01.233972Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.233993Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.234026Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.234380Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.234649Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:01.234906Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.234935Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:01.234961Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.235020Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.235403Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:01.235432Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:01.235458Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.235493Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:01.235524Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:01.235749Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3109:4469];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-10T12:13:01.235852Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3111:4471];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-10T12:13:01.235981Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3119:4477];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 
0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2026-01-10T12:12:53.398945Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704789538899616:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:53.399267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:53.462713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:53.782482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:53.790015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:53.790129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:53.870290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:53.887664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:53.889257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704789538899397:2081] 1768047173358192 != 1768047173358195 2026-01-10T12:12:53.967594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:54.169086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:54.177041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:54.191884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:54.292412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:54.394079Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:58.448690Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:58.468113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:58.638222Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:58.704306Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:58.704427Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:58.710527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:58.740191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:59.024276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:59.066182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:59.400220Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> TBackupCollectionTests::BackupServiceDirectoryValidation [GOOD] >> TBackupCollectionTests::BackupWithIndexes >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> KqpRm::ResourceBrokerNotEnoughResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2026-01-10T12:12:54.769595Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704792160028824:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:54.769678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:54.800029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:55.139025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:55.139390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:55.143427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:55.316395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:55.329380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704792160028795:2081] 1768047174768046 != 1768047174768049 2026-01-10T12:12:55.360866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:55.579574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:55.597378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:55.602028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:55.686833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:55.792826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:00.060314Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:00.068194Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:00.088329Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704814804134287:2081] 1768047179908613 != 1768047179908616 2026-01-10T12:13:00.156747Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:00.158555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:00.158653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:00.190853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:00.265236Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:00.461457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:00.468616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:00.484692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 
2026-01-10T12:13:00.495112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2026-01-10T12:12:55.511382Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704798055951225:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:55.511438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:55.869860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:55.879905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:55.879994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:55.998881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:56.082627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704798055951084:2081] 1768047175462731 != 1768047175462734 2026-01-10T12:12:56.096426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:56.100291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:56.329487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:56.344460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:56.399116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:56.533082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:56.600177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:12:56.635348Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704802350919337:2596] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-10T12:12:56.642201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:12:56.674946Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704802350919377:2630] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-10T12:13:01.292479Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704820968810654:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:01.292534Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:01.389929Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:01.564225Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704820968810463:2081] 1768047181274438 != 1768047181274441 2026-01-10T12:13:01.566465Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:01.594285Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:01.594371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:01.606999Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:01.732308Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:02.121594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:02.136766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:02.144937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:02.296291Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:02.324310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:13:02.365392Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710660: 2026-01-10T12:13:02.365812Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7593704825263778726:2595] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:13:02.365893Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7593704825263778726:2595] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:13:02.365913Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7593704825263778726:2595] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-10T12:13:02.369522Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710661: 2026-01-10T12:13:02.369816Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7593704825263778734:2600] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:13:02.369897Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7593704825263778734:2600] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:13:02.369913Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7593704825263778734:2600] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-10T12:13:02.378690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> KqpSinkMvcc::OlapNamedStatement >> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2026-01-10T12:12:54.691585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704790963559182:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:54.691876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:54.734221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:55.243012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:55.251759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:55.251862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:55.341151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:55.385588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:55.387994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704790963559136:2081] 1768047174689517 != 1768047174689520 2026-01-10T12:12:55.480799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:55.659417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:55.674241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:55.696947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:55.704888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:12:55.782392Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704795258527284:2528] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T12:13:00.644187Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:13:00.644623Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:00.699317Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:00.699421Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:00.704332Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:00.712309Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704818502922342:2081] 1768047180473629 != 1768047180473632 2026-01-10T12:13:00.718668Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:00.940205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:01.177763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:01.186514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:01.229046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-10T12:13:01.413691Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:13:01.510890Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:13:01.510992Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:13:01.511078Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:13:01.511161Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] 
recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:13:01.536226Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-10T12:13:01.536368Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:13:01.563102Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-10T12:13:01.563360Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:13:01.565223Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-10T12:13:01.565421Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:13:01.565500Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:13:01.566112Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:13:01.566573Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:13:01.569569Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:13:01.569638Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-10T12:13:01.569690Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:13:01.569779Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:13:01.571459Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:13:01.573012Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:13:01.573081Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:01.573128Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:13:01.573212Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:13:01.573252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:01.573332Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:13:01.573429Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:13:01.573473Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:13:01.573508Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:13:01.573542Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:13:01.573591Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:13:01.573884Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:13:01.573934Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-10T12:13:01.573996Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:13:01.574227Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:13:01.574536Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:13:01.576693Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:13:01.576743Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-10T12:13:01.576779Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:13:01.576825Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:13:01.577952Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:13:01.579126Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-10T12:13:01.579175Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:13:01.579205Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:13:01.579247Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:13:01.579275Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:01.579317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:13:01.579374Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-10T12:13:01.579423Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:13:01.579455Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:13:01.579483Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:13:01.579522Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:13:01.579731Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:13:01.579770Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-10T12:13:01.579836Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:13:01.580134Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:13:01.580357Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:13:01.580582Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:13:01.580690Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... ot data from cache. Partition 0 offset 6 partno 0 count 1 parts_count 10 source 1 size 5243650 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-10T12:13:06.760619Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 17. All 2 blobs are from cache. 2026-01-10T12:13:06.760704Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' 2026-01-10T12:13:06.760752Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 6 partno 0 count 1 parts 10 suffix '0' 2026-01-10T12:13:06.760848Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-10T12:13:06.761004Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761062Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761100Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761135Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761194Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 1 size 635356 from pos 0 cbcount 2 2026-01-10T12:13:06.761232Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761268Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761303Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761334Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761367Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761401Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761436Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 
cbcount 1 2026-01-10T12:13:06.761469Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761501Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761536Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 1 size 123358 from pos 0 cbcount 1 2026-01-10T12:13:06.761611Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761647Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761679Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761716Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761747Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761778Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761812Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761844Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761874Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761914Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:13:06.761948Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 1 size 123358 from pos 0 cbcount 1 2026-01-10T12:13:06.762113Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-10T12:13:06.762154Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-10T12:13:06.762202Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 4:6 isTruncatedBlob 0 2026-01-10T12:13:06.776916Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 4 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 1 2026-01-10T12:13:06.803833Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 5 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 
2026-01-10T12:13:06.805733Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-10T12:13:06.807122Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00000_0000000001_00016 2026-01-10T12:13:06.807241Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000001_00006_0000000002_00014 2026-01-10T12:13:06.807328Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-10T12:13:06.807639Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-10T12:13:06.807715Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:13:06.807853Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 283 2026-01-10T12:13:06.807951Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 6 count 2 size 271 2026-01-10T12:13:06.807993Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00016(+) to d0000000000_00000000000000000000_00000_0000000001_00016(+) 2026-01-10T12:13:06.808028Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000001_00006_0000000002_00014(+) to d0000000000_00000000000000000001_00006_0000000002_00014(+) 2026-01-10T12:13:06.813379Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 0 count 1 actorID [3:139:2142] 2026-01-10T12:13:06.813483Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 1 count 2 actorID [3:139:2142] 2026-01-10T12:13:06.813528Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 1 size 3072032 actorID [3:139:2142] is actual 1 2026-01-10T12:13:06.813618Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 1 size 283 actorID [3:139:2142] 2026-01-10T12:13:06.813657Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 4 count 2 size 7415109 actorID [3:139:2142] is actual 1 2026-01-10T12:13:06.813724Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 4 count 2 size 271 actorID [3:139:2142] 2026-01-10T12:13:06.813922Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:13:06.813966Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-10T12:13:06.814012Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-10T12:13:06.814234Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-10T12:13:06.814297Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 1 partno 6 count 2 parts 14 suffix '0' size 271 2026-01-10T12:13:06.814354Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 3072032 2026-01-10T12:13:06.824343Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 7415109 2026-01-10T12:13:06.838249Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-10T12:13:06.838354Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 271 Write 3 done Got compacter offset = -1 2026-01-10T12:13:06.843871Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:318:2304], now have 1 active actors on pipe 2026-01-10T12:13:06.843966Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:13:06.844022Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:13:06.844164Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 6 for user __ydb_compaction_consumer 2026-01-10T12:13:06.844529Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:320:2306], now have 1 active actors on pipe Got start offset = 3 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TBackupCollectionTests::IncrementalBackupOperation [GOOD] >> TBackupCollectionTests::EmptyIncrementalBackupRace >> KqpRm::Reduce >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> KqpRm::DisonnectNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2026-01-10T12:13:06.396162Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:06.396835Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002b10/r3tmp/tmpErZCBG/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:06.397693Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002b10/r3tmp/tmpErZCBG/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002b10/r3tmp/tmpErZCBG/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4028505960880382874 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:06.498385Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:06.498881Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:06.535835Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:06.536018Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:06.540239Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:06.540405Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:06.540644Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:06.540724Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:06.540766Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:06.540862Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:06.541134Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:06.592421Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047186 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:06.592795Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:06.592942Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047186 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:06.593362Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:06.593526Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:06.593669Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:06.593711Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:06.593855Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047186 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:06.594125Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:06.594158Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:06.594249Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047186 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:06.594883Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:06.595017Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with 
size: 1 2026-01-10T12:13:06.595524Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:06.596334Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:06.596660Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:06.596847Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:06.597102Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:06.597429Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:06.597674Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:07.733120Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:13:07.733323Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:13:07.734737Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:08.061250Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> KqpRm::ManyTasks |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2026-01-10T12:13:08.555416Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:08.560722Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002aff/r3tmp/tmp3CSGdv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:08.561426Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002aff/r3tmp/tmp3CSGdv/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002aff/r3tmp/tmp3CSGdv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8064293760849522260 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:08.635218Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:08.635591Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:08.672438Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:08.672623Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:08.672687Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:08.672752Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:08.672873Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:08.672918Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:08.672964Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:08.673030Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:08.673229Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:08.691537Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047188 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:08.691834Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:08.691937Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047188 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-10T12:13:08.692407Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:08.692584Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:08.692728Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:08.692768Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:08.692895Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047188 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:08.693134Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:08.693191Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:08.693266Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047188 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-10T12:13:08.693827Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:08.693979Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:08.694479Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:08.694954Z node 2 :KQP_RESOURCE_MANAGER DEBUG: 
kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:08.695159Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:08.695258Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:08.695475Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:08.695758Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:08.695941Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:08.698968Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:467:2347]) priority=0 resources={0, 1000} 2026-01-10T12:13:08.699045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:08.699109Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:08.699163Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:08.699219Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:467:2347])) 2026-01-10T12:13:08.699508Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2026-01-10T12:13:08.699620Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:467:2347]) priority=0 resources={0, 100000} 2026-01-10T12:13:08.699687Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:08.699735Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task kqp-1-2-2 (2 by [1:467:2347]) 2026-01-10T12:13:08.699776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task kqp-1-2-2 (2 by [1:467:2347]) 2026-01-10T12:13:08.699868Z node 1 :KQP_RESOURCE_MANAGER NOTICE: kqp_rm_service.cpp:338: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. 
TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2026-01-10T12:13:08.698860Z } |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> KqpRm::Reduce [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2026-01-10T12:13:09.436968Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:09.437399Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af9/r3tmp/tmpa5YH2j/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:09.440270Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af9/r3tmp/tmpa5YH2j/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af9/r3tmp/tmpa5YH2j/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3271400568847509106 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:09.489059Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:09.489341Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:09.504930Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:09.505041Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:09.505112Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:09.505182Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:09.505268Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:09.505299Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:09.505320Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:09.505371Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:09.505501Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:09.520509Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:09.520812Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:09.520896Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:09.521256Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:09.521376Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:09.521489Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:09.521521Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:09.521649Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:09.521853Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:09.521876Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:09.521934Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:09.522520Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:09.522622Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:09.523103Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:09.523555Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: 
Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:09.523728Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:09.523889Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:09.524214Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:09.524443Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:09.524647Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:09.527417Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:09.527494Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:09.527550Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:09.527589Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:09.527656Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:13:09.527926Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:09.528208Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task kqp-1-1-1 (1 by [1:467:2347]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2026-01-10T12:13:09.528256Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:09.528286Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:13:09.528313Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2026-01-10T12:12:58.257608Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704809044532612:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:58.257674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:58.904183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:58.922926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:58.923030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:59.083245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:59.181435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:59.200808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:59.320392Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:59.580148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:59.620887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:59.765844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:03.658262Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704830360669174:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:03.658620Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:03.729745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:03.895397Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:03.895498Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:03.899901Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704830360669046:2081] 1768047183648906 != 1768047183648909 2026-01-10T12:13:03.924427Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:03.926018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:03.928230Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:04.235267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:04.242559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:04.275196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:04.675675Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TBackupCollectionTests::BackupWithIndexes [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit >> KqpRm::NotEnoughExecutionUnits >> KqpRm::NotEnoughMemory |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> KqpRm::SingleSnapshotByExchanger >> KqpRm::ManyTasks [GOOD] |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> KqpRm::SingleTask |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> KqpRm::DisonnectNodes [GOOD] >> KqpRm::NotEnoughExecutionUnits [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2026-01-10T12:13:10.713570Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:10.714071Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002afa/r3tmp/tmpvcSNBX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:10.714597Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002afa/r3tmp/tmpvcSNBX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002afa/r3tmp/tmpvcSNBX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6485099522061227069 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:10.771729Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:10.772109Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:10.796058Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:10.796617Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:10.796687Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:10.796767Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:10.796924Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:10.796970Z node 2 :KQP_RESOURCE_MANAGER CRIT: 
kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:10.797025Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:10.797053Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:10.797178Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.833604Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.833881Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.833956Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.834326Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:10.834630Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:10.834766Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:10.835107Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.835224Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.835497Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:10.835537Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.835603Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.836556Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:10.836654Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.837371Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.837804Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.837974Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:10.838083Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:10.838265Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:10.838364Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:10.838433Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:10.846027Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.846105Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.846153Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.846203Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.846269Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:13:10.846534Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.846730Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.846769Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.846827Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.846852Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.846875Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:467:2347])) 2026-01-10T12:13:10.846904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.846993Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-3-3 (3 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.847034Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-3-3 (3 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847061Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.847096Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-3-3 (3 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847114Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:467:2347])) 2026-01-10T12:13:10.847131Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.847198Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-4-4 (4 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.847231Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-4-4 (4 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847251Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.847269Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-4-4 (4 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847296Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:467:2347])) 2026-01-10T12:13:10.847322Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.847386Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-5-5 (5 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.847428Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-5-5 (5 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847455Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.847469Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-5-5 (5 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847488Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:467:2347])) 2026-01-10T12:13:10.847503Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 5. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.847602Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-6-6 (6 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.847639Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-6-6 (6 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847660Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.847673Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-6-6 (6 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847688Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:467:2347])) 2026-01-10T12:13:10.847703Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.847762Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-7-7 (7 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.847786Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-7-7 (7 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847820Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.847845Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-7-7 (7 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.847869Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:467:2347])) 2026-01-10T12:13:10.852325Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.852531Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-8-8 (8 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.852589Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-8-8 (8 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.852626Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.852671Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-8-8 (8 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.852745Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:467:2347])) 2026-01-10T12:13:10.852786Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 8. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.852899Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-9-9 (9 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:10.852962Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-9-9 (9 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.852993Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:10.853035Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-9-9 (9 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:10.853062Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:467:2347])) 2026-01-10T12:13:10.853090Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:10.853204Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:13:10.853245Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:13:10.853287Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> KqpRm::NotEnoughMemory [GOOD] >> DstCreator::SameOwner >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 27694, MsgBus: 19121 2026-01-10T12:12:04.902661Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704575506164792:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:04.903932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:05.486317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:05.486451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:05.600647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:05.608567Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704575506164730:2081] 1768047124820120 != 1768047124820123 2026-01-10T12:12:05.630663Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:05.636558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:05.976593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:05.976663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:06.020350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:06.020387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:06.020396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:06.020485Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:06.588034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:06.594630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:09.449089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704596981002091:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:09.449266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:09.449990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704596981002112:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:09.450038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704596981002113:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:09.450076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:09.455711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:09.475230Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704596981002116:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:12:09.638419Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704596981002167:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:09.894267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704575506164792:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:09.897880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:10.006233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:12:10.248136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:12:10.248390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:12:10.248617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:12:10.248740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:12:10.248850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:12:10.248951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:12:10.249051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:12:10.249151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:12:10.249273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:12:10.249371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:12:10.249466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:12:10.249553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:12:10.249671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593704601275969638:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:12:10.272562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:12:10.272629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:12:10.272816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:12:10.272914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:12:10.273006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:12:10.273101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:12:10.273210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593704601275969655:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11 ... 
OLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.401643Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.411361Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.411445Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.411469Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.412407Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.412471Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.412495Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.422508Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.422595Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.422634Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.422948Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.423010Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.423032Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-10T12:13:02.433963Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.434050Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.434086Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.441322Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.441412Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.441463Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.444505Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.444605Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.444649Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:02.546719Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx5e4yd8f71frwybt4gae4", SessionId: ydb://session/3?node_id=4&id=ZTM5Zjg2ZjQtYjFkZDQ2MC02NmU0MDI2Ni1jYWVmMTVi, Slow query, duration: 15.352923s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-10T12:13:05.892293Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[4:7593704761487953013:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715671; 2026-01-10T12:13:05.893493Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7593704838797373952:3654], SessionActorId: [4:7593704825912471675:3654], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7593704838797373952:3654].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-10T12:13:05.893651Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7593704838797373952:3654], SessionActorId: [4:7593704825912471675:3654], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7593704825912471675:3654]. 2026-01-10T12:13:05.893853Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=OTFhNGUwMzktMWEzNjFmMTgtOTI0YjE2YmItODVhMGZlMjM=, ActorId: [4:7593704825912471675:3654], ActorState: ExecuteState, LegacyTraceId: 01kekx62x8fv6qwz9621p8kvj4, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7593704838797374092:3654] from: [4:7593704838797373952:3654] trace_id# 2026-01-10T12:13:05.893951Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [4:7593704838797374092:3654] TxId: 281474976715671. Ctx: { TraceId: 01kekx62x8fv6qwz9621p8kvj4, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTFhNGUwMzktMWEzNjFmMTgtOTI0YjE2YmItODVhMGZlMjM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-10T12:13:05.894245Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=OTFhNGUwMzktMWEzNjFmMTgtOTI0YjE2YmItODVhMGZlMjM=, ActorId: [4:7593704825912471675:3654], ActorState: ExecuteState, LegacyTraceId: 01kekx62x8fv6qwz9621p8kvj4, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:13:05.896787Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593704838797373952:3654], SessionActorId: [4:7593704825912471675:3654], StateRollback: unknown message 278003713
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2026-01-10T12:13:05.908202Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.908899Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.909381Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.909871Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.910368Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.910820Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.911594Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.911998Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:13:05.912026Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2026-01-10T12:13:10.233607Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:10.234184Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002afb/r3tmp/tmp9KmZr2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:10.235116Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002afb/r3tmp/tmp9KmZr2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002afb/r3tmp/tmp9KmZr2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3550115004690771291 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:10.299742Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:10.300091Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:10.329560Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:10.329691Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:10.329746Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:10.329812Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:10.329924Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:10.329971Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:10.330001Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:10.330057Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:10.330230Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.349541Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.349817Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.349898Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.350208Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:10.350335Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:10.350442Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:10.350476Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.350609Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.350855Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:10.350882Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:10.350961Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:10.351476Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:10.351570Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.352035Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.352521Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: 
Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:10.352743Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:10.352841Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:10.353356Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:10.353584Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:10.353743Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:11.375682Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:13:11.375794Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:13:11.376251Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-10T12:13:11.376467Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-10T12:13:11.376664Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-10T12:13:11.377499Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:360:2277] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2026-01-10T12:13:11.377753Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:151:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-10T12:13:11.377882Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:11.378164Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:492: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:11.378209Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:167: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:472:2105], reason: tenant updated 2026-01-10T12:13:11.378351Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.383065Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.383257Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.760877Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2026-01-10T12:13:11.819199Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:11.819745Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# 
Can't open file "/home/runner/.ya/build/build_root/d9xi/002af7/r3tmp/tmpndyv0m/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:11.820320Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af7/r3tmp/tmpndyv0m/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af7/r3tmp/tmpndyv0m/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3939465667857989602 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:11.894908Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:11.895234Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:11.922467Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:11.922599Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:11.922651Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:11.922711Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:11.922833Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:11.922873Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:11.922908Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:11.922960Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:11.923113Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.958008Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.958308Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.958390Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.964250Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:11.964475Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:11.964617Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:11.964653Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.964852Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.965056Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:11.965080Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.965180Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.965673Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:11.965796Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.966209Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.966699Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: 
Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.966891Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:11.967013Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:11.967210Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:11.967403Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:11.967558Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2026-01-10T12:13:11.852098Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:11.853834Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af6/r3tmp/tmpjberZn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:11.854374Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af6/r3tmp/tmpjberZn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af6/r3tmp/tmpjberZn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12671504275724985505 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:11.963428Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:11.963768Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:11.981451Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:11.981597Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:11.981655Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:11.981713Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:11.981838Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:11.981882Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:11.981927Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:11.981948Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-10T12:13:11.982077Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.997174Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.997444Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.997534Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.997890Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:11.998087Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:11.998216Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:11.998249Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.998361Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.998572Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:11.998600Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:11.998668Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047191 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:11.999115Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:11.999196Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:11.999614Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.000053Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.000320Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:12.000437Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:12.000690Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.000807Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.000917Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2026-01-10T12:13:12.765689Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:12.766219Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af2/r3tmp/tmpelkanw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:12.772672Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af2/r3tmp/tmpelkanw/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af2/r3tmp/tmpelkanw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9180233846818878855 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:12.853527Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:12.853867Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: 
TResourceBrokerActor bootstrap 2026-01-10T12:13:12.890539Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:12.890671Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:12.890726Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:12.890787Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:12.890913Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:12.890959Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:12.890992Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:12.891049Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:12.891218Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.931236Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.931545Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.931642Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.932007Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:12.939950Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:12.940162Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:12.940203Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.940393Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.940652Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:12.940681Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.940759Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.941304Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:13:12.941413Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.941864Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.942398Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.942582Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:12.942688Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:12.942938Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.943139Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.943309Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:12.948254Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:12.948347Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.948410Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:12.948461Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.948509Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:467:2347])) 2026-01-10T12:13:12.948763Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:12.948979Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:13:12.949044Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:467:2347])) 2026-01-10T12:13:12.949096Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> KqpScan::ScanRetryRead >> KqpRm::SingleSnapshotByExchanger [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> KqpScan::RemoteShardScan >> TBackupCollectionTests::BackupWithIndexesOmit [GOOD] >> TBackupCollectionTests::BackupWithIndexesDefault >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2026-01-10T12:13:06.914039Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003059/r3tmp/tmpB1L0Q7//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-10T12:13:06.952678Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# 
false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:13:10.040162Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003059/r3tmp/tmpB1L0Q7//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-10T12:13:10.049720Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:13:11.061050Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003059/r3tmp/tmpB1L0Q7//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-10T12:13:11.069892Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:13:12.207777Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003059/r3tmp/tmpB1L0Q7//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-10T12:13:12.218394Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:13:13.180397Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003059/r3tmp/tmpB1L0Q7//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-10T12:13:13.193849Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-10T12:12:37.323688Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.323714Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.323756Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.324170Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:37.325918Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.342113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.344008Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-10T12:12:37.345406Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.345432Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.345460Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.345811Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:37.346674Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.346818Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.347213Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:37.347749Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.347877Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:37.352188Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-10T12:12:37.352328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:12:37.353450Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.353491Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.353532Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.354763Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:37.368406Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.368641Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.369019Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:37.369505Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.372211Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:37.376189Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:37.376290Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:12:37.377764Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.377797Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.377821Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.378284Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:37.379036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.379232Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.379554Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:37.380503Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.380727Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:37.380832Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:37.380880Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-10T12:12:37.382019Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.382043Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.382173Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.382513Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:12:37.383942Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.384069Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.384580Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:37.385463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.385682Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:37.385808Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:37.385870Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:12:37.386899Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.386924Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.386946Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:37.388384Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:12:37.389828Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:12:37.390041Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.390292Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:12:37.390986Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:37.391241Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:12:37.391330Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:12:37.391359Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:12:37.391412Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2026-01-10T12:12:37.398420Z :ReadSession INFO: Random seed for debugging is 1768047157398387 2026-01-10T12:12:37.760442Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704718835806109:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.760548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.847507Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704720689962697:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.847768Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.886462Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:37.887150Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:37.891221Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.177633Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.197047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.252438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.252551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.256315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.256401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.314290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.321099Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:38.326306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.385083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:38.395754Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:38.487976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:38.555657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002a00/r3tmp/yandexau5V5m.tmp 2026-01-10T12:12:38.555680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002a00/r3tmp/yandexau5V5m.tmp 2026-01-10T1 ... eIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.456473Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.560211Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:08.560251Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.560264Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:08.560283Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.560298Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.661509Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:08.661551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.661566Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:08.661585Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.661597Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.764295Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:08.764335Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.764345Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:08.764363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.764374Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.864674Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:08.864709Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.864719Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:08.864738Z node 2 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.864749Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.930452Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_4936639689192822435_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-10T12:13:08.972250Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:08.972290Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.972304Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:08.972319Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:08.972330Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:08.984159Z :INFO: [/Root] [/Root] [882dee09-1922128b-6625c3a1-463353af] Closing read session. Close timeout: 0.000000s 2026-01-10T12:13:08.984319Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-10T12:13:08.984372Z :INFO: [/Root] [/Root] [882dee09-1922128b-6625c3a1-463353af] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16757 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:13:08.984472Z :NOTICE: [/Root] [/Root] [882dee09-1922128b-6625c3a1-463353af] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:13:08.984516Z :DEBUG: [/Root] [/Root] [882dee09-1922128b-6625c3a1-463353af] [dc1] Abort session to cluster 2026-01-10T12:13:08.985066Z :NOTICE: [/Root] [/Root] [882dee09-1922128b-6625c3a1-463353af] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:13:08.986601Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_4936639689192822435_v1 grpc read done: success# 0, data# { } 2026-01-10T12:13:08.986642Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_4936639689192822435_v1 grpc read failed 2026-01-10T12:13:08.986699Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_4936639689192822435_v1 grpc closed 2026-01-10T12:13:08.986760Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_4936639689192822435_v1 is DEAD 2026-01-10T12:13:08.988733Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593704783260318092:2492] disconnected. 2026-01-10T12:13:08.988761Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593704783260318092:2492] disconnected; active server actors: 1 2026-01-10T12:13:08.988792Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7593704783260318092:2492] client user disconnected session shared/user_1_1_4936639689192822435_v1 2026-01-10T12:13:08.990446Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_4936639689192822435_v1 2026-01-10T12:13:08.990494Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7593704783260318095:2495] destroyed 2026-01-10T12:13:08.990560Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_4936639689192822435_v1 2026-01-10T12:13:09.069297Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:09.069340Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:09.069352Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:09.069369Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:09.069382Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:09.171714Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:09.171751Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:09.171775Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:09.171793Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:09.171805Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:11.234096Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, 
ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:11.234147Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:11.234176Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:13:11.234577Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:13:11.235187Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:13:11.235387Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:11.240349Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:13:11.241137Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:13:11.241674Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:13:11.241899Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-10T12:13:11.241994Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:13:11.242070Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:13:11.242114Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-10T12:13:11.242266Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:13:11.242315Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-10T12:13:13.249461Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:13.249511Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:13.249544Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:13:13.259685Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:13:13.348376Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:13:13.348634Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:13.364935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:13:13.365146Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:13:13.376208Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:13:13.376355Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBackupCollectionTests::EmptyIncrementalBackupRace [GOOD] >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2026-01-10T12:13:12.309975Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:13:12.310585Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af3/r3tmp/tmprGUkCp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:13:12.311375Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af3/r3tmp/tmprGUkCp/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af3/r3tmp/tmprGUkCp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16874311542436842240 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:13:12.377557Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:12.377944Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:13:12.399538Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:13:12.399724Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:13:12.399796Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:13:12.399874Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start 
KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:13:12.400019Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:12.400072Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:12.400158Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:13:12.400231Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:13:12.400448Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.423768Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.424135Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.424249Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.424632Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:12.424779Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:13:12.424912Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:12.424957Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.425100Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.425367Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:13:12.425396Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.425476Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:12.426075Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: 
kqpexch+/dc-1, with size: 0 2026-01-10T12:13:12.426261Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.426758Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.427285Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:13:12.427500Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:13:12.427693Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:12.427939Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.428243Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:13:12.428452Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:12.432334Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:12.432420Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.432481Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:12.432528Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.432575Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:467:2347])) 2026-01-10T12:13:12.432864Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:12.432983Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:13:12.433050Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.433101Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:13:12.433145Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:13:12.433183Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:467:2347])) 2026-01-10T12:13:12.433269Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:13:12.433531Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:12.433697Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047192 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-10T12:13:12.433990Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:13.495160Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:13:13.495449Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:13:13.495552Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300050 (remove task kqp-1-2-1 (1 by [1:467:2347])) 2026-01-10T12:13:13.495656Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100100 2026-01-10T12:13:13.495768Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-10T12:13:13.495859Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:13:13.495937Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300050 to 0.100100 (remove task kqp-2-1-2 (2 by [1:467:2347])) 2026-01-10T12:13:13.496020Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-10T12:13:13.496805Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:13:13.497066Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047193 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:13:13.497723Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:13:13.825672Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |88.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |88.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> TLocksFatTest::LocksLimit [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> TBackupCollectionTests::BackupWithIndexesDefault [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> StatisticsSaveLoad::Simple |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 
[FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TTxLocatorTest::TestWithReboot >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes >> TTxLocatorTest::Boot >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> TTxLocatorTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:11:09.973665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:10.093020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:10.102024Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:10.102468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:10.102575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:10.468005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:10.468196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:10.474828Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047067198378 != 1768047067198381 2026-01-10T12:11:10.492365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:10.539763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:10.633747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:11.152507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:11.153284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:11.153336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:11.153370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:11.153936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:11.169172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:22.978461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.978610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.978688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.979935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.980048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:22.985123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:23.010902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2826], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:11:23.087109Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2860] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:23.381097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:24.253952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:24.722073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:25.726795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:26.438354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:26.941671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:28.163241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:28.573321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-10T12:11:34.909993Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: 
"01kekx2zer8hg8aqx2j0r6b420", SessionId: ydb://session/3?node_id=1&id=YzMxOWYyNjAtNzcyZDVmYmMtODI2MDQ5MzEtZDFkOTc2MTc=, Slow query, duration: 11.933844s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n ", parameters: 0b REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-10T12:11:35.594077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:35.594165Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:46.518845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:47.871092Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager. ... 
ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:04.141265Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:04.142041Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-10T12:13:04.142083Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-10T12:13:04.142114Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-01-10T12:13:04.142145Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-01-10T12:13:04.142187Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-01-10T12:13:04.142219Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:04.142261Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:04.143158Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3136:4496];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:13:04.143290Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3142:4498];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-10T12:13:04.143404Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3151:4505];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-10T12:13:15.739650Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.739851Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.739903Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.739946Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.752295Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.753019Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-10T12:13:15.753390Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.753475Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-10T12:13:15.753542Z node 1 :TX_TIERING DEBUG: 
log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.753585Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.753674Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.753733Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.753765Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-10T12:13:15.753798Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.753827Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.753870Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.753909Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.753938Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-10T12:13:15.753972Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.754000Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.754041Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.754097Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.754124Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:15.754155Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.754184Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.754227Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.754583Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.754617Z node 1 :TX_TIERING INFO: log.cpp:841: 
fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:15.754651Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.754694Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.754936Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-10T12:13:15.754969Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-10T12:13:15.755001Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.755032Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-10T12:13:15.755070Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-10T12:13:15.755461Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3136:4496];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-10T12:13:15.755569Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3142:4498];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-10T12:13:15.755660Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3151:4505];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 
:Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2026-01-10T12:13:19.160566Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:19.161124Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:19.161991Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:19.166693Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.167360Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:19.197982Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.198130Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.198249Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:19.198412Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.198453Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.198583Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:19.198705Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:19.200273Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-10T12:13:19.200982Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-10T12:13:19.201389Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-10T12:13:19.201767Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-10T12:13:19.202219Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-10T12:13:19.202577Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.202677Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-10T12:13:19.202888Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.203176Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.203264Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range 
size#100000 2026-01-10T12:13:19.203445Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.203610Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-10T12:13:19.203802Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.204026Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.204107Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.204253Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.204401Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-10T12:13:19.204549Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.204627Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-10T12:13:19.204888Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-10T12:13:19.204931Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-10T12:13:19.205095Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.205156Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.205279Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-10T12:13:19.205308Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-10T12:13:19.205382Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.205492Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-10T12:13:19.205530Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-10T12:13:19.205666Z node 1 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.205758Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-10T12:13:19.205790Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-10T12:13:19.205858Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.205924Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.206002Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-10T12:13:19.206038Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-10T12:13:19.206173Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.206243Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-10T12:13:19.206267Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-10T12:13:19.206366Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.206413Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-10T12:13:19.206447Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-10T12:13:19.206532Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.206595Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-10T12:13:19.206629Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-10T12:13:19.206796Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.206847Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-10T12:13:19.206871Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-10T12:13:19.206940Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.207032Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-10T12:13:19.207066Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-10T12:13:19.213728Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 720575 ... ender# [1:403:2437] TEvAllocateResult from# 8500000 to# 8600000 2026-01-10T12:13:19.291765Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.291835Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2026-01-10T12:13:19.291861Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8600000 to# 8700000 2026-01-10T12:13:19.291994Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.292054Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2026-01-10T12:13:19.292098Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8700000 to# 8800000 2026-01-10T12:13:19.292205Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2026-01-10T12:13:19.292229Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8800000 to# 8900000 2026-01-10T12:13:19.292307Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2026-01-10T12:13:19.292342Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:411:2445] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-10T12:13:19.299192Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2026-01-10T12:13:19.299571Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2026-01-10T12:13:19.300109Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# 
[1:437:2471] requested range size#100000 2026-01-10T12:13:19.300595Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2026-01-10T12:13:19.300778Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.301040Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.301248Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.301434Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2026-01-10T12:13:19.301584Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.301736Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2026-01-10T12:13:19.301885Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.302134Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.302284Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2026-01-10T12:13:19.302438Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.302594Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.302745Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2026-01-10T12:13:19.302874Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.303039Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.303140Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2026-01-10T12:13:19.303473Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.303533Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.303626Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:451:2485] requested range size#100000 2026-01-10T12:13:19.303883Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2026-01-10T12:13:19.303928Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9000000 to# 9100000 2026-01-10T12:13:19.304024Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.304200Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2026-01-10T12:13:19.304231Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9100000 to# 9200000 2026-01-10T12:13:19.304287Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.304351Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.304488Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2026-01-10T12:13:19.304525Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9200000 to# 9300000 2026-01-10T12:13:19.304589Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.304754Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2026-01-10T12:13:19.304781Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9300000 to# 9400000 2026-01-10T12:13:19.304835Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.304974Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2026-01-10T12:13:19.305001Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9400000 to# 9500000 2026-01-10T12:13:19.305048Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.305169Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 
9500000 Reserved to# 9600000 2026-01-10T12:13:19.305192Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9500000 to# 9600000 2026-01-10T12:13:19.305249Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.305372Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-10T12:13:19.305414Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9600000 to# 9700000 2026-01-10T12:13:19.305465Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.305579Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2026-01-10T12:13:19.305616Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9700000 to# 9800000 2026-01-10T12:13:19.305707Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-10T12:13:19.305735Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9800000 to# 9900000 2026-01-10T12:13:19.305875Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-10T12:13:19.305913Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:451:2485] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2026-01-10T12:12:56.583535Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704800918015846:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:56.583777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:57.201081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:57.201202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-10T12:12:57.203662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:57.484841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:57.484935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704800918015721:2081] 1768047176487640 != 1768047176487643 2026-01-10T12:12:57.499205Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:57.600214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:57.683631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:57.883280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:57.900922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:57.912876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:58.030013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:58.325094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:58.443706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:01.560564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704800918015846:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:01.560656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:08.484491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:08.484633Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:08.603606Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:08.607127Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704854442329426:2081] 1768047188382571 != 1768047188382574 2026-01-10T12:13:08.637058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:08.637144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:08.661625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:08.688945Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:08.860381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:08.892413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:08.900728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:09.044505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:09.107128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:09.481492Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:13.184376Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704872159950277:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:13.184519Z node 3 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:13.232412Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:13.359700Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:13.366636Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593704872159950082:2081] 1768047193166081 != 1768047193166084 2026-01-10T12:13:13.381252Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:13.381343Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:13.396239Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:13.439646Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:13.677534Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:13.684226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:13.705157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:13.804503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:13.878434Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:13.923252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:14.189428Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |88.2%| 
[TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2026-01-10T12:13:19.737038Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:19.737554Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:19.738327Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:19.739900Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.740394Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:19.751744Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.751897Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.752015Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:19.752419Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.752469Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.752612Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:19.752763Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2026-01-10T12:13:19.735707Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:19.736397Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:19.737284Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:19.739163Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.739762Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:19.756022Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.760440Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.760623Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:19.760804Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.760846Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.760970Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:19.761119Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:19.762702Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-10T12:13:19.763354Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-10T12:13:19.763723Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-10T12:13:19.768367Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-10T12:13:19.769034Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-10T12:13:19.769522Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.769636Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-10T12:13:19.769858Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.770113Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.770210Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range 
size#100000 2026-01-10T12:13:19.770408Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.770573Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-10T12:13:19.770767Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.771019Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.771076Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.771215Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.771341Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-10T12:13:19.771500Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.771582Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-10T12:13:19.771863Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-10T12:13:19.771915Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-10T12:13:19.776206Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.776346Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.776539Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-10T12:13:19.776577Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-10T12:13:19.776670Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.776829Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-10T12:13:19.776869Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-10T12:13:19.777020Z node 1 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.777136Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-10T12:13:19.777180Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-10T12:13:19.777253Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.777319Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.777412Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-10T12:13:19.777435Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-10T12:13:19.777602Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.777660Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-10T12:13:19.777685Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-10T12:13:19.777776Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.777818Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-10T12:13:19.777865Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-10T12:13:19.777966Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.778038Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-10T12:13:19.778079Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-10T12:13:19.778229Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.778281Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-10T12:13:19.778305Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-10T12:13:19.778367Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:19.778426Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-10T12:13:19.778452Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-10T12:13:19.785361Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 7205759404 ... # 9500000 2026-01-10T12:13:20.362370Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:619:2556] TEvAllocateResult from# 9400000 to# 9500000 2026-01-10T12:13:20.362503Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.362596Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2026-01-10T12:13:20.362631Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:621:2558] TEvAllocateResult from# 9500000 to# 9600000 2026-01-10T12:13:20.362771Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.362851Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.362922Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-10T12:13:20.362960Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:623:2560] TEvAllocateResult from# 9600000 to# 9700000 2026-01-10T12:13:20.363221Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.363283Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.363365Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2026-01-10T12:13:20.363403Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:625:2562] TEvAllocateResult from# 9700000 to# 9800000 2026-01-10T12:13:20.363526Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.363606Z node 1 
:TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-10T12:13:20.363635Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:627:2564] TEvAllocateResult from# 9800000 to# 9900000 2026-01-10T12:13:20.363763Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.363889Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.363964Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-10T12:13:20.363996Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:629:2566] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-10T12:13:20.369696Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2026-01-10T12:13:20.370939Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:876: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2026-01-10T12:13:20.372089Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:421: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2026-01-10T12:13:20.372171Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2026-01-10T12:13:20.372367Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-10T12:13:20.372443Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-10T12:13:20.372499Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372545Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-10T12:13:20.372618Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: 
[[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372689Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372742Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372770Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2026-01-10T12:13:20.372796Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372825Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.372949Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:632: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2026-01-10T12:13:20.372984Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-10T12:13:20.373022Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-10T12:13:20.373098Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-10T12:13:20.373127Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-10T12:13:20.373160Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2026-01-10T12:13:20.373198Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2026-01-10T12:13:20.373220Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2026-01-10T12:13:20.373244Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: 
[[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2026-01-10T12:13:20.373271Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2026-01-10T12:13:20.373305Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2026-01-10T12:13:20.373329Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2026-01-10T12:13:20.373530Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:20.374937Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.378894Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:20.379191Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:20.380372Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:20.380455Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1641:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.380614Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:20.380700Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] Test command err: 2026-01-10T12:11:06.277219Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704328856475430:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:06.277338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:06.519175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:06.522357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:06.522455Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:06.585336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:06.640768Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704328856475403:2081] 1768047066275247 != 1768047066275250 2026-01-10T12:11:06.645582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:06.711879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:06.711914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:06.711921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:06.712010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:06.715740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:06.905186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:07.130585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:11:07.134752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:11:07.137111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:07.203661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.291131Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:07.328693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.378009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.423552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.454307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.489219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.517950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.550846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.579764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:07.611660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:09.386721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704341741378942:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:09.386821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704341741378951:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:09.386879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:09.387662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704341741378958:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:09.387747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:09.390725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:09.403919Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704341741378956:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-10T12:11:09.487606Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704341741379010:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:09.947855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:09.979700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:10.008680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:10.041373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:10.067272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp: ... 
:18.126031Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:18.126074Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 1ms 2026-01-10T12:13:18.126544Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:18.126582Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:13:18.126678Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 2ms 2026-01-10T12:13:18.127839Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:18.251818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550397:2833], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.251905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704895633550398:2834], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:18.251968Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.252980Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550401:2835], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.253039Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.257476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704895633550395:2832]: Pool not found 2026-01-10T12:13:18.258164Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:13:18.800285Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550420:2841], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.800429Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704895633550421:2842], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:18.800502Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.805570Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550424:2843], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.805662Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.813908Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704895633550418:2840]: Pool not found 2026-01-10T12:13:18.814701Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:13:18.820482Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704895633550442:2849], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:18.820869Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550441:2848], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.820976Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.821884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704895633550445:2850], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.821948Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:18.826479Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704895633550439:2847]: Pool not found 2026-01-10T12:13:18.827252Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:13:18.992216Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:33626) connection closed by inactivity timeout |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TTxLocatorTest::TestAllocateAll >> TTxLocatorTest::TestAllocateAll [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TTxLocatorTest::TestImposibleSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2026-01-10T12:13:21.211163Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:21.211726Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:21.212584Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:21.214463Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.215128Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:21.226773Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.226918Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.227041Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:21.227204Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.227245Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.227377Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:21.227525Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:21.228279Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#8796093022207 2026-01-10T12:13:21.228890Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.228960Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.229073Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2026-01-10T12:13:21.229115Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2026-01-10T12:13:21.235290Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2026-01-10T12:13:21.235932Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.236020Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.236158Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2026-01-10T12:13:21.236203Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2026-01-10T12:13:21.236635Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2026-01-10T12:13:21.237039Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.237123Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.237225Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2026-01-10T12:13:21.237291Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2026-01-10T12:13:21.237654Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2026-01-10T12:13:21.237987Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.238065Z node 1 :TABLET_MAIN DEBUG: 
tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.238146Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2026-01-10T12:13:21.238192Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2026-01-10T12:13:21.238587Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2026-01-10T12:13:21.238983Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.239066Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.239181Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2026-01-10T12:13:21.239224Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2026-01-10T12:13:21.239594Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2026-01-10T12:13:21.239961Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.240015Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.240147Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2026-01-10T12:13:21.240190Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2026-01-10T12:13:21.240627Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2026-01-10T12:13:21.240966Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.241068Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.241152Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2026-01-10T12:13:21.241188Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:97:2131] 
TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2026-01-10T12:13:21.241619Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2026-01-10T12:13:21.242070Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.242141Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.242224Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2026-01-10T12:13:21.242261Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2026-01-10T12:13:21.242721Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2026-01-10T12:13:21.243080Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.243175Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.243251Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2026-01-10T12:13:21.243289Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2026-01-10T12:13:21.243742Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2026-01-10T12:13:21.244127Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.244191Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.244274Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Suc ... 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2026-01-10T12:13:21.258617Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2026-01-10T12:13:21.258955Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.259046Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.259136Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2026-01-10T12:13:21.259178Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2026-01-10T12:13:21.259861Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2026-01-10T12:13:21.260201Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.260268Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.260375Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2026-01-10T12:13:21.260426Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2026-01-10T12:13:21.261163Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2026-01-10T12:13:21.261463Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.261540Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.261624Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2026-01-10T12:13:21.261662Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2026-01-10T12:13:21.262479Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2026-01-10T12:13:21.262808Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046447617:2:28:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.262880Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.262992Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2026-01-10T12:13:21.263052Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2026-01-10T12:13:21.263776Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2026-01-10T12:13:21.264099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.264207Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.264325Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2026-01-10T12:13:21.264365Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2026-01-10T12:13:21.265068Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2026-01-10T12:13:21.265426Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.265494Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.265589Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2026-01-10T12:13:21.265632Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2026-01-10T12:13:21.266443Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2026-01-10T12:13:21.266796Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.266872Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.266950Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 
246290604621796 Reserved to# 255086697644003 2026-01-10T12:13:21.266996Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2026-01-10T12:13:21.267775Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2026-01-10T12:13:21.268107Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.268175Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.268283Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2026-01-10T12:13:21.268328Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2026-01-10T12:13:21.269148Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2026-01-10T12:13:21.269546Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.269603Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.269683Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2026-01-10T12:13:21.269722Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2026-01-10T12:13:21.270493Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#8796093022207 2026-01-10T12:13:21.270802Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.270898Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.270996Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2026-01-10T12:13:21.271056Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2026-01-10T12:13:21.271871Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#31 2026-01-10T12:13:21.272251Z node 1 :TABLET_MAIN 
DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.272318Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:21.272410Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2026-01-10T12:13:21.272447Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2026-01-10T12:13:21.273259Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:205:2239] requested range size#1 2026-01-10T12:13:21.273371Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-10T12:13:21.273417Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:205:2239] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2026-01-10T12:13:22.161049Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:22.161606Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:22.162446Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:22.164281Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.164867Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:22.175748Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.175899Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.176018Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:22.178426Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.178500Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.178647Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:22.178812Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:22.179658Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710655 2026-01-10T12:13:22.180346Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.180424Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.180572Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2026-01-10T12:13:22.180629Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2026-01-10T12:13:22.186543Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#1 2026-01-10T12:13:22.186746Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-10T12:13:22.186787Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables >> TTxLocatorTest::TestZeroRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> 
TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2026-01-10T12:13:22.681357Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:22.682085Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:22.683161Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:22.685494Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.686267Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:22.699748Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.699916Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.700059Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:22.700257Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.700309Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.700485Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:22.700683Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:22.701566Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710656 2026-01-10T12:13:22.701783Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2026-01-10T12:13:22.707699Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-10T12:13:22.708397Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#123456 2026-01-10T12:13:22.709041Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.709128Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.709361Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2026-01-10T12:13:22.709413Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2026-01-10T12:13:22.709829Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2114] requested range size#281474976587200 2026-01-10T12:13:22.710021Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2026-01-10T12:13:22.710074Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:80:2114] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-10T12:13:22.710489Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#246912 2026-01-10T12:13:22.710985Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.711093Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:22.711235Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2026-01-10T12:13:22.711279Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2026-01-10T12:13:22.711687Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#281474976340288 2026-01-10T12:13:22.711795Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2026-01-10T12:13:22.711831Z node 1 :TX_ALLOCATOR ERROR: 
txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2026-01-10T12:13:24.084445Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:13:24.085071Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:13:24.085892Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:13:24.087780Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.088503Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:13:24.099979Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.100208Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.100396Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:13:24.100594Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.100640Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.100771Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:13:24.100921Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:13:24.101820Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#0 2026-01-10T12:13:24.102499Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.102559Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:13:24.102687Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2026-01-10T12:13:24.102749Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 0 expected SUCCESS |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 |88.2%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DstCreator::SamePartitionCount [GOOD] |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> ColumnShardTiers::TTLUsage [GOOD] >> VDiskIOTest::HugeBlobIOCount |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2026-01-10T12:13:13.086818Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704871719874736:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:13.086908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:13.556368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:13.643154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:13.648250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:13.727510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:13.728493Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704871719874551:2081] 
1768047193060861 != 1768047193060864 2026-01-10T12:13:13.766067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:13.849789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:14.088471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:14.461088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:14.461116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:14.461124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:14.461244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:15.183503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:15.206192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:15.219719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:15.442325Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:13:15.442351Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:13:15.442964Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:13:18.087263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704871719874736:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:18.087352Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:18.562273Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047195362, tx_id: 281474976710659 } } } 2026-01-10T12:13:18.562675Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:13:18.572641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:18.574359Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2026-01-10T12:13:18.574374Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2026-01-10T12:13:18.650370Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2026-01-10T12:13:18.650410Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:13:20.410578Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704905526981355:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:20.410718Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:20.552036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:20.804331Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:20.851071Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:20.862564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:20.862664Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:20.866727Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:21.054803Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:21.204699Z node 2 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:21.204719Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:21.204724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:21.204800Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:21.424368Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:21.500922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:21.511447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:21.611804Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:13:21.611832Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:13:21.612401Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:13:25.412338Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704905526981355:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:25.412422Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:25.684558Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047201613, tx_id: 281474976710658 } } } 2026-01-10T12:13:25.685189Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:13:25.687074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:25.688899Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:13:25.688913Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:13:25.766104Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:13:25.766138Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |88.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:11:08.357835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:08.477213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:08.486773Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:08.487149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:08.487229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:08.830949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:08.831132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:08.837708Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047065423495 != 1768047065423498 2026-01-10T12:11:08.855072Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:08.902388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:09.002754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:09.489846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:09.490491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:09.490551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:09.490585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:09.491045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:09.505567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:09.646677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-10T12:11:09.751757Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:11:09.755255Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:11:09.755696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:11:09.796556Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:11:09.796892Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-10T12:11:09.805666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:09.806021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:09.806298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:09.806435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:09.806552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:09.806678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:09.806812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:09.806936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:09.807086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:09.807214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:09.807372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:11:09.807501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:11:09.807658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:11:09.835056Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:11:09.835828Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:910:2776], Recipient [1:953:2808]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:11:09.839301Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:910:2776], Recipient [1:953:2808]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:11:09.839642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:11:09.871359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:11:09.871582Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037889 2026-01-10T12:11:09.877288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:11:09.877405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:11:09.877605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:11:09.877735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:11:09.877866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:11:09.877986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:11:09.878092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:11:09.878200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:11:09.878331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:11:09.878447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:11:09.878549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:1 ... 2812]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.660625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:959:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:13:25.660704Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.660735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP column0/uint64_value: 0 TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5031;count=35;size=14676;count=211;size=1503;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5031;count=35;size=14746;count=212;size=1503;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-10T12:13:25.855157Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.855706Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:13:25.855831Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2808]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.855871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:13:25.855963Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2812]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.855997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:959:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:13:25.856072Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:25.856125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP TEvBlobStorage::TEvPut 
tId=72057594037968897;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1503;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1503;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3741;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14816;count=213;size=1503;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3741;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-10T12:13:26.022149Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:26.022237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-10T12:13:26.022333Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2808]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:26.022365Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-10T12:13:26.022429Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2812]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:26.022461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:959:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-10T12:13:26.022528Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-10T12:13:26.022561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP REQUEST=SELECT COUNT(*) FROM `/Root/olapStore/olapTable`;EXPECTATION=1 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 |88.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] Test command err: 2026-01-10T12:11:13.345890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704359684685178:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:13.346141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:13.393194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:13.658615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:13.658762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:13.661510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:13.833184Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:13.841016Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704359684685151:2081] 1768047073342162 != 1768047073342165 2026-01-10T12:11:13.841169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:13.863100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:13.863128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:13.863141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:13.863233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:14.068832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:14.139190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:14.287140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:11:14.292321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:11:14.295164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:14.312030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:11:14.357320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:14.398236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.549342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.596390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-10T12:11:14.600637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.646577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.684883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.722148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.758350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.792283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.826327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:14.861738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:16.721169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704372569588710:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:16.721186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704372569588702:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:16.721301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:16.721595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704372569588717:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:16.721702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:16.726083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:16.739281Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704372569588716:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:11:16.819430Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704372569588769:3111] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:17.287403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.324314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.396828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:17.434524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose ... :25.800200Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } 
Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:25.800242Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 3ms 2026-01-10T12:13:25.800677Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:25.800716Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:13:25.800883Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 8ms 2026-01-10T12:13:25.801459Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } 
} } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:25.838236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704926684104921:2782], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:25.838332Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704926684104922:2783], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:25.838382Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:25.839037Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704926684104925:2784], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:25.839422Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:25.844412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704926684104919:2781]: Pool not found 2026-01-10T12:13:25.844622Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:13:26.368936Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704930979072242:2791], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:26.369046Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704930979072241:2790], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.369174Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.370149Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704930979072245:2792], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.370220Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.377274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704930979072239:2789]: Pool not found 2026-01-10T12:13:26.378082Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:13:26.382044Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704930979072262:2797], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.382138Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704930979072263:2798], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:26.382190Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.382738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704930979072266:2799], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.383139Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.387957Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704930979072260:2796]: Pool not found 2026-01-10T12:13:26.388215Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:13:26.616600Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:53020) connection closed by inactivity timeout |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> TBackupCollectionTests::TableWithMixedIndexTypes [GOOD] >> TBackupCollectionTests::MultipleTablesWithIndexes >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod >> TBSV::CleanupDroppedVolumesOnRestart |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TBSV::ShouldLimitBlockStoreVolumeDropRate |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] Test command err: 2026-01-10T12:08:23.506240Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703630341703306:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:23.506294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:24.291888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.461189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:24.461292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.514367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:24.598335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:08:24.601836Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:24.643450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:25.020634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:25.020656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:25.020662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:25.020735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:25.212199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:08:25.527361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:28.507866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703630341703306:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:28.507933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:08:28.795612Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-10T12:08:28.799500Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-10T12:08:28.799524Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-10T12:08:28.803150Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MmMzOGM4NzYtZDhmZjI3YjMtY2M2ZTJjYmYtZGM5NDM0Yzg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MmMzOGM4NzYtZDhmZjI3YjMtY2M2ZTJjYmYtZGM5NDM0Yzg= temp_dir_name# cbe3a998-40f1-6f1a-7883-2ab5c3f610d6 trace_id# 2026-01-10T12:08:28.816209Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703651816541085:2329], Start check tables existence, number paths: 2 2026-01-10T12:08:28.816334Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MmMzOGM4NzYtZDhmZjI3YjMtY2M2ZTJjYmYtZGM5NDM0Yzg=, ActorId: [1:7593703651816541089:2333], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-10T12:08:28.817770Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703651816541085:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-10T12:08:28.817840Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] 
ActorId: [1:7593703651816541085:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-10T12:08:28.817873Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7593703651816541085:2329], Successfully finished 2026-01-10T12:08:28.817925Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-10T12:08:28.817969Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-10T12:08:28.885899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:28.946698Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593703647958507544:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:28.947035Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:29.022074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.022143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.054315Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-10T12:08:29.053781Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:08:29.058771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.188697Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:29.188818Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:29.189856Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190022Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190125Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190215Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190275Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190380Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2026-01-10T12:08:29.190476Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190546Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.190615Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:29.212658Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:29.504597Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2026-01-10T12:08:29.504924Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:29.782767Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:29.782816Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:29.782825Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:29.782926Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:29.921097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:29.968424Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:30.002594Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593703652530629191:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:08:30.002872Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:08:30.096782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:30.096867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:30.102669Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:30.140456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:30.160450Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-10T12:08:30.251085Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:30.251178Z node 2 :HIVE WAR ... 
erId: [13:7593704911247127163:3538], ActorId: [13:7593704919837061917:3576], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, DataQuery #1 finished UNAVAILABLE, Issues: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } }, SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, TxId: 01kekx6nvg13dsxa55g2xzhc7z 2026-01-10T12:13:24.225935Z node 13 :KQP_WORKLOAD_SERVICE WARN: query_actor.cpp:376: [TQueryBase] [TDelayRequestQuery] OwnerId: [13:7593704911247127163:3538], ActorId: [13:7593704919837061917:3576], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, Finish with UNAVAILABLE, Issues: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } }, SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, TxId: 01kekx6nvg13dsxa55g2xzhc7z 2026-01-10T12:13:24.225982Z node 13 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TDelayRequestQuery] OwnerId: [13:7593704911247127163:3538], ActorId: [13:7593704919837061917:3576], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, Delete session: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg== 2026-01-10T12:13:24.647352Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=11&id=ZWQzNDUxMjAtMmY5NGFjNDQtNjM1OWUxYjEtZTU3M2Q4ZWQ=, ActorId: [11:7593704681560874616:2333], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:13:24.226137Z node 13 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TDelayRequestQuery] OwnerId: [13:7593704705088692096:2369], ActorId: [13:7593704911247127163:3538], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, Got response [13:7593704919837061917:3576] UNAVAILABLE 2026-01-10T12:13:24.226319Z node 13 :KQP_WORKLOAD_SERVICE ERROR: pool_handlers_actors.cpp:777: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7593704705088692096:2369], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, failed to delay request UNAVAILABLE, session id: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, issues: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } 2026-01-10T12:13:24.226525Z node 13 :KQP_WORKLOAD_SERVICE WARN: pool_handlers_actors.cpp:323: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7593704705088692096:2369], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Reply continue error UNAVAILABLE to [13:7593704773808172230:3216], session id: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, issues: {
: Error: Failed to put request in queue, code: 2033 subissue: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } } 2026-01-10T12:13:24.226612Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, ActorId: [13:7593704919837061919:3578], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:13:24.226672Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, ActorId: [13:7593704919837061919:3578], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-10T12:13:24.647521Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=11&id=ZWQzNDUxMjAtMmY5NGFjNDQtNjM1OWUxYjEtZTU3M2Q4ZWQ=, ActorId: [11:7593704681560874616:2333], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:13:24.226716Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, ActorId: [13:7593704919837061919:3578], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-10T12:13:24.226760Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, ActorId: [13:7593704919837061919:3578], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:13:24.226864Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=YjJjYzRjM2ItMWVmZGY3Yy1kOTMwYjMwNS1mYTQ2NDdkNg==, ActorId: [13:7593704919837061919:3578], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-10T12:13:24.227465Z node 13 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:547} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Database disks are without free space pool_id# sample_pool_id trace_id# 2026-01-10T12:13:24.227518Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:553} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Continue request pool_id# sample_pool_id trace_id# 2026-01-10T12:13:24.227675Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Sending CompileQuery request trace_id# 2026-01-10T12:13:24.227731Z node 13 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:342: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Duration: 0.000000s, CpuConsumed: 0.000000s, AdjustCpuQuota: 0 2026-01-10T12:13:24.574174Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, ExecutePhyTx literal# false commit# true 
deferred_effects_size# 0 tx# 136744764795032 trace_id# 2026-01-10T12:13:24.574244Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Sending to Executer span_id_size# 8 trace_id# 2026-01-10T12:13:24.574563Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Created new KQP executer executer_id# [13:7593704919837061938:3216] is_rollback# false trace_id# 2026-01-10T12:13:24.588487Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Forwarded TEvStreamData to [11:7593704776050156923:4227] trace_id# 2026-01-10T12:13:24.593084Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-10T12:13:24.593272Z node 13 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx5mz7f5ec70za8svnd3sa", SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, Slow query, duration: 34.057324s, status: STATUS_CODE_UNSPECIFIED, user: user@system, results: 0b, text: "SELECT 42;", parameters: 0b 2026-01-10T12:13:24.593356Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, TxInfo status# Committed kind# Pure total_duration# 19.314 server_duration# 19.231 queries_count# 2 trace_id# 2026-01-10T12:13:24.593444Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-10T12:13:24.594178Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: ExecuteState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-10T12:13:24.594411Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: CleanupState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, EndCleanup is_final# true trace_id# 2026-01-10T12:13:24.594609Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: 
ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: CleanupState, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Sent query response back to proxy proxy_request_id# 171 proxy_id# [13:7593704683613854680:2266] trace_id# 2026-01-10T12:13:24.594677Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: unknown state, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-10T12:13:24.594832Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=ZTgyNjQwZTQtOTA3YTY1M2UtNTQ0MjJkN2QtNTRjODEzZDU=, ActorId: [13:7593704773808172230:3216], ActorState: unknown state, LegacyTraceId: 01kekx5mz7f5ec70za8svnd3sa, Session actor destroyed trace_id# 2026-01-10T12:13:24.670098Z node 13 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [13:7593704683613854476:2083], processor id# 72075186224037891, database# /Root/test-dedicated |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TBSV::ShardsNotLeftInShardsToDelete |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] Test command err: 2026-01-10T12:11:17.100348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704373797629399:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:17.100773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:17.393830Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:17.418994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:17.419096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:17.472770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:17.540371Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:17.586831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:17.638345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:17.638372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:17.638386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:17.638490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:17.841346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:18.056715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:11:18.064893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:18.089868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T12:11:18.141764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:18.181233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.296505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.351746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.398130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.433980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.472250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.515033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.550986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.588572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:18.626743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:20.459932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386682532904:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:20.460058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:20.460484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386682532915:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:20.460658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704386682532917:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:20.460718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:20.464542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:20.484151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704386682532920:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-10T12:11:20.571136Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704386682532971:3100] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:21.001119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:21.042432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:21.078157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:21.119897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:21.179614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:21.218927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... 
"" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 62 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 43 DataSize: 43 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "hdd2" TotalSize: 43 DataSize: 43 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 } 2026-01-10T12:13:29.448211Z node 2 :SQS DEBUG: service.cpp:521: Request SQS queues list 2026-01-10T12:13:29.448375Z node 2 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE_AND_EXEC 2026-01-10T12:13:29.448477Z node 2 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Serializing params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2026-01-10T12:13:29.448824Z node 2 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O&\nFlags\010Name\010Args\016Payload\022Parameter\016Account CreatedTimestamp\036CustomQueueName\016DlqName\022FifoQueue\020FolderId\034MasterTabletId\022QueueName\024QueueState\014Shards\030TablesFormat\016Version\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\030\207\203\001H\207\203\010\207\203\001H\207\203\001H\207\203\014\207\203\001H\207\203\010\207\203\001H\207\203\010\207\203\010\207\203\004\207\203\010\026\032\036\"&*.26:>BJ\000\003?\006\014queues\t\211\004?:\205\004?:\203\014\020List$Truncated\203\004F\000\t\211\026?B\203\005\004\200\205\030\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\026\032\036\"&*.26:>B\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?H \000\001\205\000\000\000\000\001\'\000\000\000\000\000\000\000?8\005?b\003?J\002\003?L\016\003?N\026\003?P\034\003?R\n\003?T\030\003?V\024\003?X\004\003?Z\010\003?\\\020\003?^\036\003?`\032\377\017\013?h\t\351\000?d\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\236\003?\240\022FROM_USER\003\022\000\t\351\000?f\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { 
Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2026-01-10T12:13:29.457672Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:29.457712Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 11ms 2026-01-10T12:13:29.458090Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:29.458139Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:13:29.458252Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 12ms 2026-01-10T12:13:29.458730Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:13:29.546929Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704944270036553:2808], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:29.547017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593704944270036554:2809], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:13:29.547066Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:29.547990Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704944270036557:2810], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:29.548069Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:29.552706Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593704944270036551:2807]: Pool not found 2026-01-10T12:13:29.553382Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:13:30.265356Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:51892) connection closed by inactivity timeout |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [FAIL] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore >> PartitionStats::CollectorOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:31.638132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:31.638569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:31.638618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:31.638657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:31.638715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:31.638751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:31.638825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:31.638911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:31.639918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:31.640277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:31.749248Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:31.749332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:31.764341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:31.769979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:31.770248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:31.779370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:31.779657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:31.780511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:31.781017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:31.787015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:31.787286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:31.788904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:31.789005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:31.789155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:31.789208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:31.789254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:31.789524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.797970Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:13:31.951637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:31.951965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.952277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:31.952340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:31.952581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:31.952650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:31.955582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:31.955877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:31.956139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.956229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:31.956280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:31.956318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:31.958918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.958990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:31.959045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:31.961375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.961438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:31.961761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:31.961821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:31.965967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:31.968441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:31.968725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:31.970033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:31.970198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:31.970255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:31.970568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:31.970638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:31.970877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:31.970985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:31.973687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:31.973737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
[1:15:2062] 2026-01-10T12:13:32.317059Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:32.317260Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 221us result status StatusPathDoesNotExist 2026-01-10T12:13:32.317438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:32.318381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:408:2384] sender: [1:475:2058] recipient: [1:106:2140] Leader for TabletID 72057594046678944 is [1:408:2384] sender: [1:478:2058] recipient: [1:477:2436] Leader for TabletID 72057594046678944 is [1:479:2437] sender: [1:480:2058] recipient: [1:477:2436] 2026-01-10T12:13:32.380696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:32.380830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:32.380873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:32.380912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:32.380969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:32.381007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:32.381059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:32.381170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-10T12:13:32.382034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:32.382356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:32.399293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:32.401057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:32.401319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:32.401516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:32.401558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:32.401964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:32.402886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1418: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:32.403012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.403119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.403610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.403708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-10T12:13:32.403986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.408215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.408426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.408587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.408763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.408969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.409327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.409451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.409942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit 
for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.410924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.411871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.422639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:32.424310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:32.424395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:32.425241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:32.425330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:32.425381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:32.448988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:479:2437] sender: [1:541:2058] recipient: [1:15:2062] 2026-01-10T12:13:32.485082Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:32.485388Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 246us result status StatusPathDoesNotExist 2026-01-10T12:13:32.485534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:13:13.930160Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704873381476188:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:13.930211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:14.780751Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:14.888326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:14.888418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:14.898525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:15.054498Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:15.131614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:15.140708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:15.437446Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593704877676443627:2143] Handle TEvNavigate describe path dc-1 2026-01-10T12:13:15.437544Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593704881971411402:2452] HANDLE EvNavigateScheme dc-1 2026-01-10T12:13:15.437708Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593704877676443630:2144], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:15.437891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593704877676443872:2294][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593704877676443630:2144], cookie# 1 2026-01-10T12:13:15.440014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704877676443895:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443892:2294], cookie# 1 2026-01-10T12:13:15.440072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704877676443896:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443893:2294], cookie# 1 2026-01-10T12:13:15.440113Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593704877676443897:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443894:2294], cookie# 1 2026-01-10T12:13:15.440171Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704873381475977:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443895:2294], cookie# 1 2026-01-10T12:13:15.440212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704873381475980:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443896:2294], cookie# 1 2026-01-10T12:13:15.440229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593704873381475983:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593704877676443897:2294], cookie# 1 2026-01-10T12:13:15.440278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704877676443895:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704873381475977:2050], cookie# 1 2026-01-10T12:13:15.440297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704877676443896:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704873381475980:2053], cookie# 1 2026-01-10T12:13:15.440313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593704877676443897:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704873381475983:2056], cookie# 1 2026-01-10T12:13:15.440362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704877676443872:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704877676443892:2294], cookie# 1 2026-01-10T12:13:15.440405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593704877676443872:2294][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:13:15.440435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704877676443872:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704877676443893:2294], cookie# 1 2026-01-10T12:13:15.440458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593704877676443872:2294][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:13:15.440493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593704877676443872:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593704877676443894:2294], cookie# 1 2026-01-10T12:13:15.440512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593704877676443872:2294][/dc-1] Sync cookie mismatch: sender# [1:7593704877676443894:2294], cookie# 1, current cookie# 0 2026-01-10T12:13:15.440592Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593704877676443630:2144], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:13:15.451785Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# 
[1:7593704877676443630:2144], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593704877676443872:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:13:15.451936Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593704877676443630:2144], cacheItem# { Subscriber: { Subscriber: [1:7593704877676443872:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:13:15.455482Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593704881971411403:2453], recipient# [1:7593704881971411402:2452], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:13:15.455593Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593704881971411402:2452] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:15.556418Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593704881971411402:2452] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:13:15.572687Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593704881971411402:2452] Handle TEvDescribeSchemeResult Forward to# [1:7593704881971411401:2451] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:13:15.580574Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593704877676443627:2143] Handle TEvProposeTransaction 2026-01-10T12:13:15.580606Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593704877676443627:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:13:15.580697Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593704877676443627:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593704881971411409:2458] 2026-01-10T12:13:15.789335Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593704881971411409:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-numbe ... es# AckNotifications: true 2026-01-10T12:13:29.502052Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593704916242774897:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:13:29.502104Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593704942012580046:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593704911947807366:2050] 2026-01-10T12:13:29.502141Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593704942012580047:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593704911947807369:2053] 2026-01-10T12:13:29.502146Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593704916242774897:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593704942012580029:3021] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:13:29.502175Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593704942012580048:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 
Version: 0 }: sender# [3:7593704911947807372:2056] 2026-01-10T12:13:29.502228Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704942012580030:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593704942012580043:3022] 2026-01-10T12:13:29.502276Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704916242774897:2120], cacheItem# { Subscriber: { Subscriber: [3:7593704942012580029:3021] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:13:29.502289Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704942012580030:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593704942012580044:3022] 2026-01-10T12:13:29.502325Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7593704942012580030:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7593704916242774897:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:29.502333Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593704911947807366:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704942012580046:3022] 2026-01-10T12:13:29.502352Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593704942012580030:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7593704942012580045:3022] 2026-01-10T12:13:29.502358Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593704911947807369:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704942012580047:3022] 2026-01-10T12:13:29.502376Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593704911947807372:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593704942012580048:3022] 2026-01-10T12:13:29.502408Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593704942012580030:3022][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7593704916242774897:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:29.502415Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593704916242774897:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 
2026-01-10T12:13:29.502465Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593704916242774897:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593704942012580030:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:13:29.502522Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704942012580049:3023], recipient# [3:7593704942012580023:2317], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:29.502528Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704916242774897:2120], cacheItem# { Subscriber: { Subscriber: [3:7593704942012580030:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:13:29.502610Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704942012580050:3024], recipient# [3:7593704942012580027:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:30.303218Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704916242774897:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:30.303409Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704916242774897:2120], cacheItem# { Subscriber: { Subscriber: [3:7593704920537742997:2630] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:13:30.303518Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704946307547363:3028], recipient# [3:7593704946307547362:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:30.503623Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593704916242774897:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:13:30.503766Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593704916242774897:2120], cacheItem# { Subscriber: { Subscriber: [3:7593704942012580030:3022] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:13:30.503855Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593704946307547368:3029], recipient# [3:7593704946307547367:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |88.4%| [TA] {RESULT} 
$(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PartitionStats::Collector >> TLocksTest::CK_Range_GoodLock [GOOD] >> PartitionStats::Collector [GOOD] |88.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> CompressExecutor::TestExecutorMemUsage [GOOD] >> Compression::WriteRAW >> TLocksTest::MultipleLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:33.332905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:33.333007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:33.333046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:33.333100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:33.333170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:33.333205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:33.333296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:33.333383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:33.334290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:33.334577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:33.433487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:33.433543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:33.453573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:33.453675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:33.453884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:33.462186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:33.462404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:33.463071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:33.463360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:33.465663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:33.465839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:33.467062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:33.467124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:33.467188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:33.467233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:33.467303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:33.467571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.476131Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:33.622599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:33.622871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.623126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:33.623180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:33.623437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:33.623517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:33.626082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:33.626340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:33.626547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.626621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:33.626660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:33.626700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:33.628917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.628982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:33.629032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:33.631733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.631789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:33.631854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:33.631913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:33.636044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:33.638057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:33.638264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:33.639452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:33.639589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:33.639638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:33.639935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:33.639994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:33.640202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:33.640280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:33.643644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:33.643718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
d: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:33.756859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:33.756923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:33.756974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:33.757019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:33.757105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:33.757171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:33.757212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:13:33.757259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:33.757298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:13:33.757336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:13:33.757449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:33.757490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:13:33.757554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:13:33.757593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-10T12:13:33.759722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:33.759781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:33.759918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:33.759978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:13:33.760643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:13:33.760721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:33.760753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 
72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:33.760892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:33.760949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:33.761133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:13:33.761300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:33.761338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:13:33.761371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:13:33.761994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:33.762093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:33.762133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:13:33.762193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-10T12:13:33.762259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:33.762732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:33.762768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:33.762813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:13:33.763220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:33.763272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:33.763319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:13:33.763353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:13:33.763379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:33.763437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:13:33.763946Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-10T12:13:33.764456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:33.764805Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-10T12:13:33.765308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:13:33.766340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:33.767396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:33.767606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:33.768764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:33.768827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:13:33.769092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:33.769124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:33.769442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:33.769543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:33.769576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2376] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-10T12:13:33.769868Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-10T12:13:33.769942Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-10T12:06:27.164454Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.164512Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.164543Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:06:27.172611Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:06:27.172674Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.172705Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.173805Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005975s 2026-01-10T12:06:27.174427Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:06:27.174465Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.174486Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.174533Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008000s 2026-01-10T12:06:27.175030Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-10T12:06:27.175058Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.175078Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:06:27.175132Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008764s 2026-01-10T12:06:27.193761Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1768046787193727 2026-01-10T12:06:28.043009Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703132546647829:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:28.043071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:06:28.372247Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:28.380583Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:06:28.955946Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:28.956142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:29.096015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:06:29.112455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:06:29.179173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:29.187025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:29.194510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:06:29.194586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:06:29.332320Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:29.342211Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:06:29.351983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:29.381721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:06:29.399993Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:06:29.408709Z node 2 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:29.427610Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:06:29.494575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:06:29.636535Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015517s 2026-01-10T12:06:30.033272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002f95/r3tmp/yandexwlZ1EX.tmp 2026-01-10T12:06:30.033306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002f95/r3tmp/yandexwlZ1EX.tmp 2026-01-10T12:06:30.033516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002f95/r3tmp/yandexwlZ1EX.tmp 2026-01-10T12:06:30.033601Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:06:30.177021Z INFO: TTestServer started on Port 29704 GrpcPort 16448 PQClient connected to localhost:16448 2026-01-10T12:06:30.564975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:06:33.012825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703132546647829:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:06:33.012888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:06:35.744479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163352351998:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.749107Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163352351985:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.749243Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.749671Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593703163352352023:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.749738Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:06:35.753792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:06:35.812465Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593703163352352000:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:06:35.916623Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekwt277599a8fnz3eyk0tk9", Request deadline has expired for 0.040522s seconds 2026-01-10T12:06:36.040051Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593703163352352030:2146] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:06:36.647119Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593703162611420197:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:36.648574Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NWEwMTg3MzctMjMzMWYyMTgtN2U4MTA5MzAtZjllYmRjZTc=, ActorId: [1:7593703162611420157:2337], ActorState: ExecuteState, LegacyTraceId: 01kekwt74cdt38vba9scxbdm4f, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:06:36.650175Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593703167647319333:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:06:36.652569Z node 2 :KQP_SESSIO ... er action and tx pending commits 2026-01-10T12:13:26.818761Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:26.818779Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:26.917324Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:26.917371Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:26.917388Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:26.917422Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:26.917439Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.020645Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.020715Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.020736Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.020762Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.020780Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.124345Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.124415Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.124434Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.124462Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.124480Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.228375Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.228431Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.228451Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.228480Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-10T12:13:27.228508Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.332594Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.332646Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.332668Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.332696Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.332721Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.432782Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.432839Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.432877Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.432905Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.432926Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.533988Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.534036Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.534057Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.534077Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.534091Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.638554Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.638609Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.638630Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.638658Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.638685Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.739791Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.739849Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.739871Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.739899Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.739917Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.839770Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.839820Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.839853Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.839882Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.839910Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:27.943195Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:27.943264Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.943290Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:27.943332Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:27.943353Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:28.046346Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:28.046413Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.046437Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:28.046468Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.046488Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:28.148286Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:28.148343Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.148367Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:28.148400Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.148420Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:28.248436Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:28.248496Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.248526Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:28.248562Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.248584Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:13:28.352309Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:28.352367Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.352392Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:28.352423Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:28.352442Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [FAIL] >> TBackupCollectionTests::MultipleTablesWithIndexes [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes |88.4%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> StatisticsSaveLoad::Delete [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |88.4%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [LD] {RESULT} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:32.263767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:32.263890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:32.264112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:32.264157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:32.264228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:32.264264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:32.264337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:32.264417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:32.265335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:32.265710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:32.501376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:32.501468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:32.533725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:32.545806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:32.546059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:32.563620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:32.563862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState 
with owners number: 0 2026-01-10T12:13:32.572707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:32.573392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:32.592936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:32.593185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:32.594717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:32.594800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:32.594929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:32.594986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:32.595032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:32.595297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:32.804624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.805753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.805945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 
281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:32.806952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000064, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:35.263679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000064 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:35.263740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000064, at schemeshard: 72057594046678944 2026-01-10T12:13:35.263886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-10T12:13:35.264038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:13:35.264097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:13:35.264143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:13:35.264180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:13:35.264263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:13:35.264335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-10T12:13:35.264384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-10T12:13:35.264436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:13:35.264475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-10T12:13:35.264513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-10T12:13:35.264657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-10T12:13:35.264724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-10T12:13:35.264772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 57 2026-01-10T12:13:35.264809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 49], 18446744073709551615 2026-01-10T12:13:35.275225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-10T12:13:35.275340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-01-10T12:13:35.275517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-10T12:13:35.275572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-10T12:13:35.280260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:13:35.280380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:35.280427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:35.280635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:35.280692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:35.280877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 49] 2026-01-10T12:13:35.281077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:13:35.281115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-10T12:13:35.281221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 129, path id: 49 FAKE_COORDINATOR: Erasing txId 129 2026-01-10T12:13:35.281711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:13:35.281810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:13:35.281842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:13:35.281912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 49], version: 18446744073709551615 2026-01-10T12:13:35.281956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-10T12:13:35.282496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:35.282552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 49], at schemeshard: 72057594046678944 2026-01-10T12:13:35.282623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:13:35.283011Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 2026-01-10T12:13:35.283193Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 2026-01-10T12:13:35.283290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:13:35.283391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:13:35.283426Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:13:35.283459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 57 2026-01-10T12:13:35.283505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:13:35.283578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-10T12:13:35.283855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2026-01-10T12:13:35.284609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2026-01-10T12:13:35.287337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-10T12:13:35.287473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:35.293872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-10T12:13:35.294053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-10T12:13:35.294134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2026-01-10T12:13:35.294808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2026-01-10T12:13:35.294865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2026-01-10T12:13:35.295632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-10T12:13:35.295754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-10T12:13:35.295800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:2091:3960] TestWaitNotification: OK eventTxId 129 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> TObjectStorageListingTest::ManyDeletes [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> 
DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> StatisticsSaveLoad::Simple [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2026-01-10T12:12:38.062448Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704724688439772:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:38.062524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:38.508172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:38.541110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.541208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.629867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.757134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:38.760182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704724688439668:2081] 1768047158034767 != 1768047158034770 2026-01-10T12:12:38.768710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:38.961114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:38.972447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:38.992831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:39.070024Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:39.079699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:39.227084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:39.294838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:42.428327Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:42.428447Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:42.563179Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:42.563290Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:42.566112Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.580196Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704741978573783:2081] 1768047162268623 != 1768047162268626 2026-01-10T12:12:42.607058Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:42.612171Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:42.970396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:43.045448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:43.053193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:43.067814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:43.076290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:43.219483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:43.277677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:43.392254Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:47.088865Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704760386312429:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:47.089077Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:47.139548Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.216015Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.282397Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:47.294255Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:47.294363Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:47.347767Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:47.590207Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:47.591410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:47.599474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:47.620742Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:47.736583Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:47.818586Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:48.030167Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:51.460753Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704778662148328:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:51.460830Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:51.553407Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:51.734426Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:13 ... 
path: dc-1/.metadata/script_executions 2026-01-10T12:13:09.050328Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:09.128741Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:09.249150Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:14.249176Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593704879093813125:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:14.249244Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:14.327215Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:14.616178Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:14.705445Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:14.705550Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:14.718829Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:14.746001Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:14.986497Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:15.201898Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:15.241875Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:15.253364Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:15.277455Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:15.283112Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:15.454005Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:15.575926Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:20.602649Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593704902622206346:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:20.602952Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:20.603310Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:20.860478Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:20.877874Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:20.877981Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:20.901665Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593704902622206094:2081] 1768047200484206 != 1768047200484209 2026-01-10T12:13:20.966714Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:20.987385Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:21.136184Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:21.582621Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:21.913696Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:21.947744Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:21.956420Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:22.104600Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:22.210289Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:27.517173Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593704935613050055:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:27.517301Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:27.793555Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:27.809519Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:27.809832Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:27.811454Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593704935613050014:2081] 1768047207477187 != 1768047207477190 2026-01-10T12:13:27.838931Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:27.863207Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:27.975015Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:28.288276Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:28.332435Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:28.471466Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:28.561220Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:28.608298Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup;
|88.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD]
Test command err: 2026-01-10T12:12:53.280888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704787506456320:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:53.280943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:53.764221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:53.770858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:53.770982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:53.849255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:53.889164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:53.894245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704787506456294:2081] 1768047173279065 != 1768047173279068 2026-01-10T12:12:54.062761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:54.130436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain,
opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:54.145684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:54.155527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:54.220570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:54.306140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:54.442778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:54.501810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:58.825099Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704809459943093:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:58.825539Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:58.873845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:59.053012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:59.053103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:59.231693Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:59.234274Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:59.244321Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:59.721820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:59.731368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:59.749596Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:12:59.778463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:59.847159Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:59.924105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:59.999370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:03.953076Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704832299137545:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:03.953748Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:03.969218Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:04.122808Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:04.123547Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:04.125790Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:04.125876Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:04.168652Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:04.324201Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:04.443024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:04.480145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:04.551749Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:04.632587Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:04.753879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:04.957395Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:09.499332Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704856137974107:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:09.499518Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:09.514906Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:09.636218Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:09.652320Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mism ... 
, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:10.253847Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:10.349956Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:10.512211Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:15.886773Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593704882165943100:2216];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:15.886892Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:16.092384Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:16.151739Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:16.155323Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:16.173264Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593704882165942917:2081] 1768047195836334 != 1768047195836337 2026-01-10T12:13:16.173593Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:16.217493Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:16.400749Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:16.634578Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:16.642822Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:16.666623Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:16.840724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:16.883097Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:16.954822Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:21.870258Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593704909204252362:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:21.870352Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:21.948362Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:22.093399Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:22.093507Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:22.098088Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:22.099595Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593704909204252237:2081] 1768047201818324 != 1768047201818327 2026-01-10T12:13:22.124813Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:22.235203Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:22.662178Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:22.673779Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:22.709399Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:22.844523Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:22.935239Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:22.999356Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:28.783522Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593704938279768472:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:28.804224Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:28.918803Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:29.133742Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:29.133856Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:29.148210Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593704938279768400:2081] 1768047208701096 != 1768047208701099 2026-01-10T12:13:29.165405Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:29.167811Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:29.208212Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:29.630521Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:29.645253Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:29.669848Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at 
schemeshard: 72057594046644480 2026-01-10T12:13:29.676932Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:29.782930Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:29.821367Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:29.855689Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
|88.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false
|88.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest
|88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false
>> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD]
Test command err: 2026-01-10T12:12:36.663172Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704715744204422:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:36.665051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:36.689245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:37.053398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:37.053475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:37.103786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:37.152626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution
tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:37.153776Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:37.157330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704715744204221:2081] 1768047156632422 != 1768047156632425 2026-01-10T12:12:37.393019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:37.403493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:37.409001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:37.526262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:37.670477Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:37.741314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:37.809706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:40.869249Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704732704919138:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:40.869456Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:40.916400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:41.016154Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704732704919030:2081] 1768047160863059 != 1768047160863062 2026-01-10T12:12:41.031254Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:41.047871Z 
node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:41.047955Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:41.050737Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:41.138740Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:41.283150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:41.293636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:41.303028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:12:41.307263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:41.374503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:41.449904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:45.799155Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593704754729917198:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:45.799205Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:45.842049Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.011505s 2026-01-10T12:12:45.848283Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:45.963785Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:12:45.963888Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:45.977380Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:46.000917Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:46.114369Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:46.253590Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:46.270930Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:46.336620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:46.406215Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:50.631176Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593704774027816009:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:50.631245Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:50.737020Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:50.868312Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593704774027815958:2081] 1768047170618785 != 1768047170618788 2026-01-10T12:12:50.877787Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:50.877891Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:50.881104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:50.881812Z node 4 :IMPORT 
WARN: schemeshard_import.cpp:440: Table ... pose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:08.205603Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:08.461085Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:13.167766Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593704873897851153:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:13.171468Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:13.198690Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:13.301476Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:13.304275Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593704873897851121:2081] 1768047193165163 != 1768047193165166 2026-01-10T12:13:13.334711Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:13.335226Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:13.379184Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:13.473476Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:14.078059Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:14.104993Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:13:14.129045Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:13:14.143361Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:14.324179Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:14.375733Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:14.539337Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:19.896254Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:19.948952Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:20.184191Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:20.206142Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:20.213067Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593704898068674032:2081] 1768047199761007 != 1768047199761010 2026-01-10T12:13:20.258028Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:20.258143Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:20.276921Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:20.742481Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:20.799375Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:20.808728Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:13:20.834440Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:20.892232Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:20.934076Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:21.105530Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:28.148400Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:28.152270Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:28.252853Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:28.260374Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593704934421577995:2081] 1768047207678697 != 1768047207678700 2026-01-10T12:13:28.329540Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:28.329664Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:28.398651Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:28.405020Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:28.760389Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:28.886974Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:29.249507Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:29.260860Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:29.285615Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:29.297442Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:29.453929Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:29.576791Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
|88.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD]
Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:13:22.343492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:22.495242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:22.504011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:22.504278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:22.504422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:23.402368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:23.586799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:23.587037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:23.665855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:23.811404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:24.859121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:24.861642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:24.861703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:24.861734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:24.862080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:24.944615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:25.627402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:13:30.382389Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:30.399601Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:13:30.405744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:30.471742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:13:30.471916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:30.546676Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:30.560847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:31.030272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:31.030391Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:31.031971Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.032731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.033515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.034306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.034646Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.034788Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.034898Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.035082Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.035243Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.057256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:31.417355Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:31.561815Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:13:31.561939Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:13:31.742045Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:13:31.742992Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:13:31.743259Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:13:31.743350Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:13:31.743402Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:13:31.743484Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:13:31.743548Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:13:31.743618Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:13:31.744251Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:13:31.757104Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:31.757220Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:31.833884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:13:31.834128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:13:32.000516Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:13:32.008722Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:13:32.057652Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:13:32.057762Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:13:32.057849Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:13:32.068009Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:13:32.080020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:32.120955Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:13:32.121147Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:13:32.164886Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:13:32.488424Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:13:32.525476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:13:33.011864Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:13:33.134412Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:13:33.134509Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:13:33.871930Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:33.885031Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2737:3365] Owner: [1:2736:3364]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:13:33.885180Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2737:3365] Owner: [1:2736:3364]. Column diff is empty, finishing 2026-01-10T12:13:33.886783Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2746:3368], ActorId: [1:2747:3369], Starting query actor #1 [1:2748:3370] 2026-01-10T12:13:33.886866Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2747:3369], ActorId: [1:2748:3370], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:33.905618Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2747:3369], ActorId: [1:2748:3370], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZGM1OGZkZGItYTFlMDZkY2ItM2RjNTE4MTgtMzI2YjQwZg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:13:34.469928Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2768:3384]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:34.470286Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:13:34.470382Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2770:3386] 2026-01-10T12:13:34.470458Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2770:3386] 2026-01-10T12:13:34.471060Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2771:2991] 2026-01-10T12:13:34.471467Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2770:3386], server id = [2:2771:2991], tablet id = 72075186224037894, status = OK 2026-01-10T12:13:34.471582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2771:2991], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:13:34.471682Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:13:34.472037Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:13:34.476662Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2768:3384], StatRequests.size() = 1 2026-01-10T12:13:34.618178Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:13:34.697101Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2747:3369], ActorId: [1:2748:3370], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZGM1OGZkZGItYTFlMDZkY2ItM2RjNTE4MTgtMzI2YjQwZg==, TxId: 2026-01-10T12:13:34.697198Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2747:3369], ActorId: [1:2748:3370], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZGM1OGZkZGItYTFlMDZkY2ItM2RjNTE4MTgtMzI2YjQwZg==, TxId: 2026-01-10T12:13:34.697539Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2746:3368], ActorId: [1:2747:3369], Got response [1:2748:3370] SUCCESS 2026-01-10T12:13:34.709166Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2787:3391], ActorId: [1:2788:3392], Starting query actor #1 [1:2789:3393] 2026-01-10T12:13:34.709286Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [1:2788:3392], ActorId: [1:2789:3393], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:34.716899Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2788:3392], ActorId: [1:2789:3393], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MjI4ZmZlMGEtNjY1ZDlhYTktNWNiMTZjNDAtNjlmNDBkMGU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-10T12:13:34.756066Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2798:3402]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:34.756621Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:13:34.756675Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2798:3402], StatRequests.size() = 1 2026-01-10T12:13:34.919234Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2788:3392], ActorId: [1:2789:3393], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjI4ZmZlMGEtNjY1ZDlhYTktNWNiMTZjNDAtNjlmNDBkMGU=, TxId: 2026-01-10T12:13:34.919341Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2788:3392], ActorId: [1:2789:3393], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MjI4ZmZlMGEtNjY1ZDlhYTktNWNiMTZjNDAtNjlmNDBkMGU=, TxId: 2026-01-10T12:13:34.919837Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2787:3391], ActorId: [1:2788:3392], Got response [1:2789:3393] SUCCESS 2026-01-10T12:13:34.920867Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2820:3407], ActorId: [1:2821:3408], Starting query actor #1 [1:2822:3409] 2026-01-10T12:13:34.920939Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2821:3408], ActorId: [1:2822:3409], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:34.924317Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2821:3408], ActorId: [1:2822:3409], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NTgwYmYxY2QtYjlmNTM5MzYtNjMwMWU2MzMtMjI1ZWM3NDY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-10T12:13:34.964548Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2831:3418]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:34.964885Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:13:34.964950Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:2831:3418], StatRequests.size() = 1 2026-01-10T12:13:35.231286Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2821:3408], ActorId: [1:2822:3409], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NTgwYmYxY2QtYjlmNTM5MzYtNjMwMWU2MzMtMjI1ZWM3NDY=, TxId: 01kekx70k59ga86ybz9vapnpzc 2026-01-10T12:13:35.231508Z node 1 :STATISTICS WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:2821:3408], ActorId: [1:2822:3409], Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=NTgwYmYxY2QtYjlmNTM5MzYtNjMwMWU2MzMtMjI1ZWM3NDY=, TxId: 01kekx70k59ga86ybz9vapnpzc 2026-01-10T12:13:35.231887Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2820:3407], ActorId: [1:2821:3408], Got response [1:2822:3409] BAD_REQUEST |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:13:23.778274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:23.901580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:23.909353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:23.909743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:23.909825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:24.459598Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:24.659478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:24.659626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:24.713904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:24.834923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:25.928639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:25.929278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:25.929327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:25.929373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:25.929970Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:26.024697Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:27.181755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:13:31.622704Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:31.632576Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:13:31.636889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:31.679830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:13:31.680013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:31.727362Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:31.730664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:31.926200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.927225Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.930691Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.931878Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.932096Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.932501Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.932696Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.932864Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:31.933084Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:32.132570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:13:32.190253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:32.190403Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:32.205532Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:32.393167Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:32.455861Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:13:32.456022Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:13:32.499507Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:13:32.501770Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:13:32.502054Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:13:32.502151Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:13:32.502228Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:13:32.502297Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:13:32.502359Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:13:32.502420Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:13:32.503526Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:13:32.722039Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:32.722200Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:32.765538Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:13:32.766043Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:13:33.067745Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:13:33.068381Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:13:33.071330Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:13:33.071401Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:13:33.071515Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:13:33.075965Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:13:33.088623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:33.103927Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:13:33.104216Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:13:33.137151Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:13:33.413292Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:13:33.776348Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:13:33.916851Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:13:33.916920Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:13:34.692890Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:34.701143Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2774:3377] Owner: [1:2773:3376]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:13:34.701276Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2774:3377] Owner: [1:2773:3376]. Column diff is empty, finishing 2026-01-10T12:13:34.701945Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2783:3380], ActorId: [1:2784:3381], Starting query actor #1 [1:2785:3382] 2026-01-10T12:13:34.702052Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2784:3381], ActorId: [1:2785:3382], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:34.733454Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2784:3381], ActorId: [1:2785:3382], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NDA0ZGRlMDAtMzRkMWRlYWQtZTZmMGRjYzktMzk2Yzg1Njg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:13:35.210848Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2805:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:35.211184Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:13:35.211298Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2807:3398] 2026-01-10T12:13:35.211416Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2807:3398] 2026-01-10T12:13:35.212216Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2808:2992] 2026-01-10T12:13:35.212592Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2807:3398], server id = [2:2808:2992], tablet id = 72075186224037894, status = OK 2026-01-10T12:13:35.212827Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = 
[2:2808:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:13:35.212921Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:13:35.213194Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:13:35.213268Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2805:3396], StatRequests.size() = 1 2026-01-10T12:13:35.353319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:13:35.419771Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2784:3381], ActorId: [1:2785:3382], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NDA0ZGRlMDAtMzRkMWRlYWQtZTZmMGRjYzktMzk2Yzg1Njg=, TxId: 2026-01-10T12:13:35.419889Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2784:3381], ActorId: [1:2785:3382], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NDA0ZGRlMDAtMzRkMWRlYWQtZTZmMGRjYzktMzk2Yzg1Njg=, TxId: 2026-01-10T12:13:35.420361Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2783:3380], ActorId: [1:2784:3381], Got response [1:2785:3382] SUCCESS 2026-01-10T12:13:35.421629Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2824:3403], ActorId: [1:2825:3404], Starting query actor #1 [1:2826:3405] 2026-01-10T12:13:35.421717Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2825:3404], ActorId: [1:2826:3405], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:35.425062Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2825:3404], ActorId: [1:2826:3405], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MzFkMjk2OTktY2M5MjYxYmQtY2IwNjBmOC0xNmExNWZkYg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-10T12:13:35.470992Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2835:3414]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:35.471326Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:13:35.471393Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2835:3414], StatRequests.size() = 1 2026-01-10T12:13:35.680219Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2825:3404], ActorId: [1:2826:3405], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MzFkMjk2OTktY2M5MjYxYmQtY2IwNjBmOC0xNmExNWZkYg==, TxId: 01kekx71101xdz4ecjwns70yct 2026-01-10T12:13:35.680415Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2825:3404], ActorId: [1:2826:3405], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MzFkMjk2OTktY2M5MjYxYmQtY2IwNjBmOC0xNmExNWZkYg==, TxId: 01kekx71101xdz4ecjwns70yct 2026-01-10T12:13:35.680730Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2824:3403], ActorId: [1:2825:3404], Got 
response [1:2826:3405] SUCCESS 2026-01-10T12:13:35.683291Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2856:3424], ActorId: [1:2857:3425], Starting query actor #1 [1:2858:3426] 2026-01-10T12:13:35.683421Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2857:3425], ActorId: [1:2858:3426], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:13:35.687754Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2857:3425], ActorId: [1:2858:3426], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=M2YxNWMwZWItMTQxOWI4YmQtYzVjODFiMGItNWRiODIzYWE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-10T12:13:35.711621Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2857:3425], ActorId: [1:2858:3426], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=M2YxNWMwZWItMTQxOWI4YmQtYzVjODFiMGItNWRiODIzYWE=, TxId: 01kekx7128cjk31x3gcv0qhbb1 2026-01-10T12:13:35.711804Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2857:3425], ActorId: [1:2858:3426], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=M2YxNWMwZWItMTQxOWI4YmQtYzVjODFiMGItNWRiODIzYWE=, TxId: 01kekx7128cjk31x3gcv0qhbb1 2026-01-10T12:13:35.712249Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2856:3424], ActorId: [1:2857:3425], Got response [1:2858:3426] SUCCESS >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [FAIL] >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore [GOOD] >> TBackupCollectionTests::BackupRestoreCoveringIndex >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::Create ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2026-01-10T12:12:56.951272Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704802404098215:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:56.951445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:56.986649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:57.581457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:57.581541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:57.661528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:58.014306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:58.020760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:58.021378Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704802404097995:2081] 1768047176884322 != 1768047176884325 2026-01-10T12:12:58.021571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:12:58.024068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:58.028485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:58.028519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:58.037327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:58.286318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:12:58.406985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:58.424996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:12:58.441262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:12:58.551395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:01.946371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704802404098215:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:01.946455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:06.452761Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704842508544202:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:06.452929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:06.494833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:13:06.576036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:06.579424Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:06.580290Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704842508544092:2081] 1768047186446796 != 1768047186446799 2026-01-10T12:13:06.580336Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:06.604050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:06.677331Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:06.756163Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:06.756194Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:06.756203Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:06.756282Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:07.027525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:07.033068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:07.060804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:13:07.171443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:07.456785Z node 2 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; .2026-01-10T12:13:11.470200Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704842508544202:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:11.470344Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; .2026-01-10T12:13:21.570005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:13:21.570045Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:22.638350Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:13:22.639776Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2026-01-10T12:13:22.640235Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-10T12:13:22.641190Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2026-01-10T12:13:22.641522Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2026-01-10T12:13:22.642299Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2026-01-10T12:13:22.642565Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-10T12:13:22.643388Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2026-01-10T12:13:22.645158Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:13:22.650346Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-10T12:13:22.650499Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-10T12:13:22.650563Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2026-01-10T12:13:22.680754Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1768047202698 at tablet 72075186224037892 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047202698 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-10T12:13:22.680814Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:13:22.681235Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1768047202698 at tablet 72075186224037889 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047202698 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-10T12:13:22.681251Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:13:22.681367Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:13:22.681390Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:13:22.681430Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768047202698:281474976711500] in PlanQueue unit at 72075186224037889 2026-01-10T12:13:22.681476Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037889 got data tx from cache 1768047202698:281474976711500 2026-01-10T12:13:22.686328Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 176804720 ... 2026-01-10T12:13:36.383404Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711911 at step 1768047216418 at tablet 72075186224037892 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047216418 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-10T12:13:36.383429Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:13:36.383550Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:13:36.383563Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:13:36.383578Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768047216418:281474976711911] in PlanQueue unit at 72075186224037892 2026-01-10T12:13:36.383606Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1768047216418:281474976711911 2026-01-10T12:13:36.384545Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-10T12:13:36.384574Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:13:36.384725Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-10T12:13:36.384748Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:13:36.384912Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1768047216418} 2026-01-10T12:13:36.384952Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-10T12:13:36.384996Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047216418 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7593704971357576484:5679], exec latency: 0 ms, propose latency: 6 ms 2026-01-10T12:13:36.385018Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-10T12:13:36.385306Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1768047216418} 2026-01-10T12:13:36.385382Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:13:36.386177Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1768047216418} 2026-01-10T12:13:36.386212Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037889 restored its data 2026-01-10T12:13:36.386370Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:13:36.387190Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-10T12:13:36.388146Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-10T12:13:36.388170Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:13:36.388323Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1768047216418} 2026-01-10T12:13:36.388370Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:13:36.388800Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-10T12:13:36.388836Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:13:36.389075Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 2026-01-10T12:13:36.390000Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-10T12:13:36.390022Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:13:36.395430Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:13:36.396570Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-10T12:13:36.396815Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:13:36.397470Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:13:36.397560Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 2026-01-10T12:13:36.398324Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:13:36.398871Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:13:36.398920Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047216418 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7593704971357576484:5679], exec latency: 17 ms, propose latency: 19 ms 2026-01-10T12:13:36.398944Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 
2026-01-10T12:13:36.399578Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:13:36.399616Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047216418 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7593704971357576484:5679], exec latency: 14 ms, propose latency: 15 ms 2026-01-10T12:13:36.399633Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:13:36.405372Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:13:36.406418Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037889 restored its data 2026-01-10T12:13:36.411462Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:13:36.413549Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:13:36.413618Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047216418 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7593704971357576484:5679], exec latency: 35 ms, propose latency: 37 ms 2026-01-10T12:13:36.413644Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:13:36.449957Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.450251Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2026-01-10T12:13:36.450579Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.450710Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.450961Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.453575Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") 
(type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.460451Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:13:36.461321Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2026-01-10T12:13:36.461936Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2026-01-10T12:13:36.462548Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2026-01-10T12:13:36.462694Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] Test command err: Starting YDB, grpc: 2489, msgbus: 17650 2026-01-10T12:09:44.802968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703974359759236:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:44.803031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:45.486217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.722993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.831712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:45.831826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:45.912793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:46.107995Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:46.122039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:46.431656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:46.464371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:46.464393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:46.464399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:46.464550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:47.094223Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703974359759271:2119] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:47.094314Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703987244661992:2741] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:47.094699Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703987244661992:2741] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.189057Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703987244661992:2741] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:47.204533Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703987244661992:2741] Handle TEvDescribeSchemeResult Forward to# 
[1:7593703987244661991:2740] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:47.206195Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703974359759271:2119] Handle TEvProposeTransaction 2026-01-10T12:09:47.206219Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703974359759271:2119] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:09:47.206321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703974359759271:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593703987244662002:2747] 2026-01-10T12:09:47.402329Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703987244662002:2747] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.406389Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703987244662002:2747] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.406435Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703987244662002:2747] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.406516Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703987244662002:2747] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.406829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703987244662002:2747] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 
0 2026-01-10T12:09:47.407010Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703987244662002:2747] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:47.407096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703987244662002:2747] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:09:47.407281Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703987244662002:2747] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:09:47.408167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:47.415660Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703987244662002:2747] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:09:47.415736Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703987244662002:2747] txid# 281474976710657 SEND to# [1:7593703987244662001:2746] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:09:47.438472Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703974359759271:2119] Handle TEvProposeTransaction 2026-01-10T12:09:47.438496Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703974359759271:2119] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:09:47.438523Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703974359759271:2119] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7593703987244662039:2778] 2026-01-10T12:09:47.441182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703987244662039:2778] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.441245Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703987244662039:2778] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.441262Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703987244662039:2778] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.441317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703987244662039:2778] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.441586Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703987244662039:2778] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.441688Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703987244662039:2778] HANDLE 
EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:47.441741Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703987244662039:2778] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:09:47.441856Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703987244662039:2778] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:09:47.442450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:47.447829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703987244662039:2778] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-10T12:09:47.447878Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703987244662039:2778] txid# 281474976710658 SEND to# [1:7593703987244662038:2777] Source {TEvProposeTrans ... 75033:2757] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:17.787495Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704891380375033:2757] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:17.787562Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704891380375033:2757] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:13:17.787711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704891380375033:2757] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:13:17.788385Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:17.793672Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704891380375033:2757] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:13:17.793737Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704891380375033:2757] txid# 281474976710657 SEND to# [59:7593704891380375032:2756] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:13:17.821577Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704891380374112:2119] Handle TEvProposeTransaction 2026-01-10T12:13:17.821602Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704891380374112:2119] TxId# 
281474976710658 ProcessProposeTransaction 2026-01-10T12:13:17.821632Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704891380374112:2119] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7593704891380375069:2790] 2026-01-10T12:13:17.824299Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704891380375069:2790] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:17.824387Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704891380375069:2790] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:13:17.824411Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704891380375069:2790] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:17.824492Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704891380375069:2790] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:17.824856Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704891380375069:2790] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:17.824993Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704891380375069:2790] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:17.825075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704891380375069:2790] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:13:17.825239Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704891380375069:2790] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:13:17.825985Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:17.832031Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704891380375069:2790] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-10T12:13:17.832365Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704891380375069:2790] txid# 281474976710658 SEND to# [59:7593704891380375068:2789] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-10T12:13:17.869201Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704891380374112:2119] Handle TEvProposeTransaction 2026-01-10T12:13:17.869243Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704891380374112:2119] TxId# 281474976710659 ProcessProposeTransaction 2026-01-10T12:13:17.869328Z node 59 :TX_PROXY DEBUG: 
proxy_impl.cpp:256: actor# [59:7593704891380374112:2119] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [59:7593704891380375133:2846] 2026-01-10T12:13:17.872969Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704891380375133:2846] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39960" 2026-01-10T12:13:17.873068Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704891380375133:2846] txid# 281474976710659 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:13:17.873096Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704891380375133:2846] txid# 281474976710659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:17.873165Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704891380375133:2846] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:17.873981Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704891380375133:2846] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:17.874144Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704891380375133:2846] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:17.874231Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704891380375133:2846] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2026-01-10T12:13:17.874462Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704891380375133:2846] txid# 281474976710659 HANDLE EvClientConnected 2026-01-10T12:13:17.875272Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:17.878657Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704891380375133:2846] txid# 281474976710659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710659} 2026-01-10T12:13:17.878729Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704891380375133:2846] txid# 281474976710659 SEND to# [59:7593704891380375132:2306] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 48} 2026-01-10T12:13:18.132236Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:22.108208Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593704891380373994:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:22.108298Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:23.616727Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6fpb8xj9h8he3cmyx4nf", Request deadline has expired for 0.728604s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16360 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+3148 (0x1AD3879C) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7FF1D1B83D90) __libc_start_main+128 (0x7FF1D1B83E40) _start+41 (0x1871E029) |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TBackupCollectionTests::IncrementalBackupWithIndexes [GOOD] >> TBackupCollectionTests::OmitIndexesFlag >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 3721, msgbus: 26493 2026-01-10T12:09:44.152365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703975038682168:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:44.152423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:44.416221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.574338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:44.574478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:44.618341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:44.653487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:44.666427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.886461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:44.886483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:44.886490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:44.886563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:45.176619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:45.198230Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703975038682345:2119] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:45.198311Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703979333650454:2724] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:45.198617Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703979333650454:2724] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.260609Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703979333650454:2724] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:45.267056Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703979333650454:2724] Handle TEvDescribeSchemeResult Forward to# [1:7593703979333650453:2723] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:45.273322Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703975038682345:2119] Handle TEvProposeTransaction 
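The SchemeReqAccess test output in this section interleaves long runs of TX_PROXY DEBUG records, and comparing the [GOOD] and [FAIL] variants is easier once the records are grouped by scheme-transaction id (txid#), so each request's Bootstrap, EvNavigateKeySetResult, TEvModifySchemeTransaction and final status can be read in isolation. A rough sketch under the assumption that every record in this flattened capture starts with an ISO-8601 timestamp followed by "node N :"; the splitting and status patterns are guesses about this capture, not an official log schema:

import re
from collections import defaultdict

# Assumption: every record in this flattened capture begins
# "<ISO-8601 timestamp>Z node <n> :<COMPONENT> <LEVEL>: ...".
ENTRY_SPLIT = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :)")
TXID = re.compile(r"txid# (\d+)")
STATUS = re.compile(r'Status (Status\w+) HANDLE|issues: \{ message: "([^"]+)"')

def schemereq_outcomes(text):
    """Map each TX_PROXY scheme-request txid to the statuses / issue messages seen for it."""
    outcomes = defaultdict(list)
    for entry in ENTRY_SPLIT.split(text):  # zero-width split needs Python 3.7+
        if ":TX_PROXY" not in entry:
            continue
        txid = TXID.search(entry)
        if not txid:
            continue
        for status, issue in STATUS.findall(entry):
            outcomes[txid.group(1)].append(status or issue)
    return outcomes

Applied to a capture like this one it separates, for instance, the requests that ended in StatusAccepted or StatusSuccess from the one that ended with an "Access denied for ordinaryuser@builtin" issue, which makes it quicker to see which requests a given AlterLoginProtect case accepted and which it rejected.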
2026-01-10T12:09:45.273355Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703975038682345:2119] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:45.273412Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703975038682345:2119] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593703979333650459:2729] 2026-01-10T12:09:45.605752Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703979333650459:2729] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.605854Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703979333650459:2729] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.605875Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703979333650459:2729] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.605965Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703979333650459:2729] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.606265Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703979333650459:2729] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.606370Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703979333650459:2729] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:45.606471Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703979333650459:2729] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:45.606614Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703979333650459:2729] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:45.607273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:45.617367Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703979333650459:2729] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:45.617432Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703979333650459:2729] txid# 281474976715657 SEND to# [1:7593703979333650458:2728] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:45.663853Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703975038682345:2119] Handle TEvProposeTransaction 2026-01-10T12:09:45.663882Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# 
[1:7593703975038682345:2119] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:09:45.663912Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703975038682345:2119] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7593703979333650499:2763] 2026-01-10T12:09:45.666703Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703979333650499:2763] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.666770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703979333650499:2763] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.666785Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703979333650499:2763] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.666857Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703979333650499:2763] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.667150Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703979333650499:2763] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.667242Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703979333650499:2763] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:45.667347Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703979333650499:2763] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:09:45.667506Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703979333650499:2763] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:09:45.668067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:45.672255Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703979333650499:2763] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:09:45.672311Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703979333650499:2763] txid# 281474976715658 SEND to# [1:7593703979333650498:2762] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:09:45.780672Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703975038682345:2119] Handle TEvProposeTransaction 2026-01-10T12:09:45.780708Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237 ... 
ndle TEvProposeTransaction 2026-01-10T12:13:19.680918Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704872088564527:2100] TxId# 281474976710661 ProcessProposeTransaction 2026-01-10T12:13:19.680970Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704872088564527:2100] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7593704897858369707:2981] 2026-01-10T12:13:19.684903Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704897858369707:2981] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2026-01-10T12:13:19.684965Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704897858369707:2981] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:19.684988Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704897858369707:2981] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2026-01-10T12:13:19.685157Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7593704897858369707:2981] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-10T12:13:19.685188Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7593704897858369707:2981] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-10T12:13:19.686020Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7593704897858369707:2981] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:13:19.686112Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704897858369707:2981] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:19.686336Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704897858369707:2981] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:19.686431Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704897858369707:2981] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:19.686542Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# 
[59:7593704897858369707:2981] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-10T12:13:19.686688Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704897858369707:2981] txid# 281474976710661 HANDLE EvClientConnected 2026-01-10T12:13:19.701486Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704897858369707:2981] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-10T12:13:19.701647Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7593704897858369707:2981] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:13:19.701687Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704897858369707:2981] txid# 281474976710661 SEND to# [59:7593704897858369636:2339] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-10T12:13:19.737450Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704872088564527:2100] Handle TEvProposeTransaction 2026-01-10T12:13:19.737478Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704872088564527:2100] TxId# 281474976710662 ProcessProposeTransaction 2026-01-10T12:13:19.737524Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704872088564527:2100] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7593704897858369731:2993] 2026-01-10T12:13:19.740594Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704897858369731:2993] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39366" 2026-01-10T12:13:19.740672Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704897858369731:2993] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:19.740695Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704897858369731:2993] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:19.740751Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704897858369731:2993] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:19.741129Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704897858369731:2993] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:19.741239Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704897858369731:2993] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:19.799721Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704897858369731:2993] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-10T12:13:19.799897Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704897858369731:2993] txid# 281474976710662 HANDLE EvClientConnected 2026-01-10T12:13:19.803393Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704897858369731:2993] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-10T12:13:19.803440Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704897858369731:2993] txid# 281474976710662 SEND to# [59:7593704897858369730:2331] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-10T12:13:19.901009Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704872088564527:2100] Handle TEvProposeTransaction 2026-01-10T12:13:19.901042Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704872088564527:2100] TxId# 281474976710663 ProcessProposeTransaction 2026-01-10T12:13:19.901087Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704872088564527:2100] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7593704897858369764:3008] 2026-01-10T12:13:19.905052Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704897858369764:3008] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39388" 2026-01-10T12:13:19.905136Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704897858369764:3008] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:19.905163Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704897858369764:3008] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-01-10T12:13:19.905340Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7593704897858369764:3008] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-10T12:13:19.905379Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7593704897858369764:3008] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-10T12:13:19.905434Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704897858369764:3008] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:19.905722Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704897858369764:3008] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:19.905750Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7593704897858369764:3008] txid# 281474976710663, Access denied for ordinaryuser@builtin, attempt 
to manage user 2026-01-10T12:13:19.905819Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7593704897858369764:3008] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-01-10T12:13:19.905841Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704897858369764:3008] txid# 281474976710663 SEND to# [59:7593704897858369763:2348] Source {TEvProposeTransactionStatus Status# 5} 2026-01-10T12:13:19.906382Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=NGQyMTkyNGItMjM1YTgyMGEtMzJlMzRlMzQtNTJmNzEwYTI=, ActorId: [59:7593704897858369749:2348], ActorState: ExecuteState, LegacyTraceId: 01kekx6hksdhnd734z52x1f244, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-10T12:13:19.906936Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7593704872088564527:2100] Handle TEvExecuteKqpTransaction 2026-01-10T12:13:19.906956Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7593704872088564527:2100] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 4297, msgbus: 26666 2026-01-10T12:09:43.805429Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703971781093689:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:43.815380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:44.177940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.228875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:44.229010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:44.239437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:44.340223Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:44.480889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.518660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:44.518682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:44.518692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:44.518777Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:44.819331Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703971781093924:2137] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:44.819384Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703976076062027:2745] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:44.819698Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703976076062027:2745] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:44.836414Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:44.871303Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703976076062027:2745] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:44.880621Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703976076062027:2745] Handle TEvDescribeSchemeResult Forward to# [1:7593703976076062018:2742] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:44.896399Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703971781093924:2137] Handle TEvProposeTransaction 2026-01-10T12:09:44.896427Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703971781093924:2137] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:44.896491Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703971781093924:2137] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593703976076062036:2751] 2026-01-10T12:09:45.134568Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703976076062036:2751] txid# 281474976715657 Bootstrap EvSchemeRequest record: 
Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.134657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703976076062036:2751] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:09:45.134672Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703976076062036:2751] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.134731Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703976076062036:2751] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.134996Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703976076062036:2751] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.135096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703976076062036:2751] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:45.135168Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703976076062036:2751] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:45.135282Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703976076062036:2751] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:45.135881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:45.139395Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703976076062036:2751] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:45.139455Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703976076062036:2751] txid# 281474976715657 SEND to# [1:7593703976076062035:2750] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:45.186660Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703971781093924:2137] Handle TEvProposeTransaction 2026-01-10T12:09:45.186685Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703971781093924:2137] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:09:45.186713Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703971781093924:2137] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7593703980371029371:2787] 2026-01-10T12:09:45.189487Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703980371029371:2787] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: 
"dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.189552Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703980371029371:2787] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:09:45.189572Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703980371029371:2787] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.189629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703980371029371:2787] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.190019Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703980371029371:2787] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.190108Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703980371029371:2787] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:45.190145Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703980371029371:2787] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:09:45.190269Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703980371029371:2787] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:09:45.190806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:45.194401Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703980371029371:2787] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:09:45.194472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703980371029371:2787] txid# 281474976715658 SEND to# [1:7593703980371029370:2786] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:09:47.644661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703988960964108:2329], DatabaseId: /dc-1, PoolId: default, Failed t ... 
ctRequired# true 2026-01-10T12:13:29.876510Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704943714724194:2933] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-10T12:13:29.876755Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704943714724194:2933] txid# 281474976710661 HANDLE EvClientConnected 2026-01-10T12:13:29.881764Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704943714724194:2933] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2026-01-10T12:13:29.881838Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704943714724194:2933] txid# 281474976710661 SEND to# [59:7593704943714724193:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-10T12:13:29.975747Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704917944919140:2113] Handle TEvProposeTransaction 2026-01-10T12:13:29.975778Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704917944919140:2113] TxId# 281474976710662 ProcessProposeTransaction 2026-01-10T12:13:29.975832Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704917944919140:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7593704943714724224:2954] 2026-01-10T12:13:29.979223Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704943714724224:2954] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37982" 2026-01-10T12:13:29.979314Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704943714724224:2954] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:29.979342Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704943714724224:2954] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:29.979409Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704943714724224:2954] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:29.979839Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704943714724224:2954] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:29.979998Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704943714724224:2954] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:29.980307Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704943714724224:2954] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-10T12:13:29.980527Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7593704943714724224:2954] txid# 281474976710662 HANDLE EvClientConnected 2026-01-10T12:13:29.981409Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:29.988238Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704943714724224:2954] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-10T12:13:29.988303Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704943714724224:2954] txid# 281474976710662 SEND to# [59:7593704943714724223:2342] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-10T12:13:30.060883Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704917944919140:2113] Handle TEvProposeTransaction 2026-01-10T12:13:30.060915Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704917944919140:2113] TxId# 281474976710663 ProcessProposeTransaction 2026-01-10T12:13:30.060968Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704917944919140:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7593704948009691605:3025] 2026-01-10T12:13:30.064357Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704948009691605:3025] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37992" 2026-01-10T12:13:30.064443Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704948009691605:3025] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:30.064468Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704948009691605:3025] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:30.064524Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704948009691605:3025] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:30.064894Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704948009691605:3025] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:30.064987Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704948009691605:3025] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:30.109030Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704948009691605:3025] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-10T12:13:30.109255Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704948009691605:3025] txid# 
281474976710663 HANDLE EvClientConnected 2026-01-10T12:13:30.113293Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704948009691605:3025] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-10T12:13:30.113353Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704948009691605:3025] txid# 281474976710663 SEND to# [59:7593704948009691604:2344] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-10T12:13:30.176868Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704917944919140:2113] Handle TEvProposeTransaction 2026-01-10T12:13:30.176915Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704917944919140:2113] TxId# 281474976710664 ProcessProposeTransaction 2026-01-10T12:13:30.176980Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704917944919140:2113] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7593704948009691636:3040] 2026-01-10T12:13:30.180340Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704948009691636:3040] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2ODA5MDQwOSwiaWF0IjoxNzY4MDQ3MjA5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.VBTHc4rIf0azKpWscEAV5dCLc3FINAeselWRFEXc6tRdMgvKlo2QAUQUxxO0lf2apdH_VVCqYBXzi7Ty251STN6n0wlPfFWYc9O7CiUcsqh52AmL2ql_W_mB8pDcR_GD_oVaDIYDufyUwOroemGJrqVP0TVPr9rBY8B6jHn-IylPAC42KdHfDnP6BfclhS09DMD0mscLry0mlGVXMdw4I_jKZFILZu5alui8uqkmNIwzoQUfkBIYyhw4HHokIxUYba28fccR9M78pvd_S8ni6pNZB-g4i3cm7C4VXOmcYEDBbyjuvlqvQbCSIbjoiPF-amM0R1MnFrQj8oiZWPzhWQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2ODA5MDQwOSwiaWF0IjoxNzY4MDQ3MjA5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38020" 2026-01-10T12:13:30.180426Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704948009691636:3040] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:30.180450Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704948009691636:3040] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-10T12:13:30.180676Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7593704948009691636:3040] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-10T12:13:30.180765Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7593704948009691636:3040] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-10T12:13:30.180843Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704948009691636:3040] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:30.181276Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704948009691636:3040] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:30.181315Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7593704948009691636:3040] txid# 281474976710664, Access 
denied for ordinaryuser, attempt to manage user 2026-01-10T12:13:30.181439Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7593704948009691636:3040] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-10T12:13:30.181471Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704948009691636:3040] txid# 281474976710664 SEND to# [59:7593704948009691635:2356] Source {TEvProposeTransactionStatus Status# 5} 2026-01-10T12:13:30.181900Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=Y2RhYjJhZDktYzQxZTk4ZDItM2M1YzU1NjMtYjNlNDQ5ZGE=, ActorId: [59:7593704948009691626:2356], ActorState: ExecuteState, LegacyTraceId: 01kekx6vnd0tgd3f96wjrh707c, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-10T12:13:30.182178Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7593704917944919140:2113] Handle TEvExecuteKqpTransaction 2026-01-10T12:13:30.182201Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7593704917944919140:2113] TxId# 281474976710665 ProcessProposeKqpTransaction |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [FAIL] Test command err: Starting YDB, grpc: 1739, msgbus: 16456 2026-01-10T12:09:45.585152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703982084413678:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:45.585205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:45.996179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:46.055293Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:46.055440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:46.096416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:46.137802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:46.395666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:46.483616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:46.483637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:46.483643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:46.483771Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:46.616370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:46.780845Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703982084413890:2128] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:46.780917Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703986379382003:2739] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:46.781322Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703986379382003:2739] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.839675Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703986379382003:2739] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:46.846687Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703986379382003:2739] Handle TEvDescribeSchemeResult Forward to# [1:7593703986379382002:2738] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 
1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:46.852954Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703982084413890:2128] Handle TEvProposeTransaction 2026-01-10T12:09:46.852994Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703982084413890:2128] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:46.853060Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703982084413890:2128] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593703986379382008:2744] 2026-01-10T12:09:46.970023Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703986379382008:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.970126Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703986379382008:2744] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.970143Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703986379382008:2744] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.970221Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703986379382008:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.970591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703986379382008:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.970751Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703986379382008:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:46.970869Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703986379382008:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:46.971014Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703986379382008:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:46.971909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:46.974553Z node 
1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703986379382008:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:46.974631Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703986379382008:2744] txid# 281474976715657 SEND to# [1:7593703986379382007:2743] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:46.986849Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703982084413890:2128] Handle TEvProposeTransaction 2026-01-10T12:09:46.986871Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703982084413890:2128] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:09:46.986897Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703982084413890:2128] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7593703986379382045:2778] 2026-01-10T12:09:46.989317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703986379382045:2778] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.989388Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703986379382045:2778] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.989403Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703986379382045:2778] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.989449Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703986379382045:2778] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.989758Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703986379382045:2778] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.989857Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703986379382045:2778] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:46.989902Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703986379382045:2778] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:09:46.990017Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703986379382045:2778] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:09:46.990534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:46.993033Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703986379382045:2778] txid# 281474976715658 Status StatusSuccess HANDLE 
{TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:09:46.993078Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703986379382045:2778] txid# 281474976715658 SEND to# [1:7593703986379382044:2777] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:09:47.048163Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703982084413890:2128] Handle TEvProposeTransaction 2026-01-10T12:09:47.048207Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237 ... TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:21.968345Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704910253847274:2755] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:21.968438Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704910253847274:2755] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:13:21.968580Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704910253847274:2755] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:13:21.969254Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:21.971842Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704910253847274:2755] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:13:21.971891Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704910253847274:2755] txid# 281474976715657 SEND to# [59:7593704910253847273:2754] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:13:22.032247Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704905958878937:2106] Handle TEvProposeTransaction 2026-01-10T12:13:22.032284Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704905958878937:2106] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:13:22.032334Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704905958878937:2106] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7593704914548814603:2785] 2026-01-10T12:13:22.035908Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704914548814603:2785] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:22.036013Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704914548814603:2785] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:22.036042Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# 
[59:7593704914548814603:2785] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:22.040522Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704914548814603:2785] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:22.040842Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704914548814603:2785] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:22.040968Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704914548814603:2785] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:22.041039Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704914548814603:2785] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:13:22.041200Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704914548814603:2785] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:13:22.041823Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:22.048371Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704914548814603:2785] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:13:22.048437Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704914548814603:2785] txid# 281474976715658 SEND to# [59:7593704914548814602:2784] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:13:22.112657Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704905958878937:2106] Handle TEvProposeTransaction 2026-01-10T12:13:22.112688Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704905958878937:2106] TxId# 281474976715659 ProcessProposeTransaction 2026-01-10T12:13:22.112734Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704905958878937:2106] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7593704914548814676:2843] 2026-01-10T12:13:22.115711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704914548814676:2843] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000\n\035\010\000\022\031\010\001\020\200\010\032\020db_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:48740" 2026-01-10T12:13:22.115794Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704914548814676:2843] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2026-01-10T12:13:22.115815Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704914548814676:2843] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:22.115871Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704914548814676:2843] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:22.116447Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704914548814676:2843] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:22.116604Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704914548814676:2843] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:22.116679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704914548814676:2843] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2026-01-10T12:13:22.116818Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704914548814676:2843] txid# 281474976715659 HANDLE EvClientConnected 2026-01-10T12:13:22.117519Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:22.123735Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704914548814676:2843] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2026-01-10T12:13:22.123793Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704914548814676:2843] txid# 281474976715659 SEND to# [59:7593704914548814675:2307] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2026-01-10T12:13:22.130672Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:26.010054Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593704910253846239:2094];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:26.010150Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:31.290842Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6ktw4m4gf1vs2n6mpc56", Request deadline has expired for 4.152725s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28757 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+3148 (0x1AD3879C) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7FD802F50D90) __libc_start_main+128 (0x7FD802F50E40) _start+41 (0x1871E029) |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> StatisticsSaveLoad::ForbidAccess [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [FAIL] Test command err: Starting YDB, grpc: 13789, msgbus: 11956 2026-01-10T12:09:46.333604Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703985579319774:2244];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:46.399394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:46.652397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:09:46.846614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:46.846702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:46.869007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:46.984200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:46.991084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:47.187221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:47.187262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:47.187279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:47.187426Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:47.336495Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:47.493327Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703985579319724:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:47.493377Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703989874287906:2731] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:47.493820Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703989874287906:2731] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.544624Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703989874287906:2731] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:47.554621Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703989874287906:2731] Handle TEvDescribeSchemeResult Forward to# [1:7593703989874287905:2730] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:47.564795Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703985579319724:2118] Handle TEvProposeTransaction 2026-01-10T12:09:47.564828Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703985579319724:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:47.564895Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703985579319724:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593703989874287914:2736] 2026-01-10T12:09:47.664439Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703989874287914:2736] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.664531Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703989874287914:2736] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.664548Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703989874287914:2736] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.664624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703989874287914:2736] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.664953Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703989874287914:2736] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.665092Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703989874287914:2736] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:47.665203Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703989874287914:2736] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:47.665343Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703989874287914:2736] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:47.666089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:47.670307Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703989874287914:2736] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:47.670372Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703989874287914:2736] txid# 281474976715657 SEND to# [1:7593703989874287913:2735] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:47.698791Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703985579319724:2118] Handle TEvProposeTransaction 2026-01-10T12:09:47.698822Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703985579319724:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:09:47.698857Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703985579319724:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7593703989874287950:2769] 2026-01-10T12:09:47.701686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703989874287950:2769] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.701759Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703989874287950:2769] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.701801Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703989874287950:2769] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.701861Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703989874287950:2769] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.702164Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703989874287950:2769] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.702278Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703989874287950:2769] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:47.702403Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703989874287950:2769] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:09:47.702564Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703989874287950:2769] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:09:47.703154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:47.710972Z node 1 :TX_PROXY DEBUG: 
schemereq.cpp:1555: Actor# [1:7593703989874287950:2769] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:09:47.711029Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703989874287950:2769] txid# 281474976715658 SEND to# [1:7593703989874287949:2768] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:09:47.784505Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703985579319724:2118] Handle TEvProposeTransaction 2026-01-10T12:09:47.784537Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:23 ... :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704930302250533:2750] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:26.181294Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704930302250533:2750] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:26.181714Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704930302250533:2750] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:26.181825Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704930302250533:2750] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:26.181929Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704930302250533:2750] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:13:26.182142Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704930302250533:2750] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:13:26.182927Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:26.190068Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704930302250533:2750] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:13:26.190147Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704930302250533:2750] txid# 281474976715657 SEND to# [59:7593704930302250532:2749] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:13:26.228436Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704926007282212:2113] Handle TEvProposeTransaction 2026-01-10T12:13:26.228474Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704926007282212:2113] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:13:26.228522Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704926007282212:2113] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7593704930302250569:2783] 2026-01-10T12:13:26.231819Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704930302250569:2783] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" 
OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:26.231902Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704930302250569:2783] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:26.231927Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704930302250569:2783] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:26.231996Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704930302250569:2783] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:26.236816Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704930302250569:2783] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:26.237010Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704930302250569:2783] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:26.237115Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704930302250569:2783] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:13:26.237297Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704930302250569:2783] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:13:26.238104Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:26.252915Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704930302250569:2783] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:13:26.252997Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704930302250569:2783] txid# 281474976715658 SEND to# [59:7593704930302250568:2782] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:13:26.305966Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704926007282212:2113] Handle TEvProposeTransaction 2026-01-10T12:13:26.306002Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704926007282212:2113] TxId# 281474976715659 ProcessProposeTransaction 2026-01-10T12:13:26.306055Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704926007282212:2113] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7593704930302250633:2839] 2026-01-10T12:13:26.309100Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704930302250633:2839] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: 
"\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:52838" 2026-01-10T12:13:26.309173Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704930302250633:2839] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:26.309198Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704930302250633:2839] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:26.309257Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704930302250633:2839] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:26.309633Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704930302250633:2839] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:26.309796Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704930302250633:2839] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:26.309879Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704930302250633:2839] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2026-01-10T12:13:26.310063Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704930302250633:2839] txid# 281474976715659 HANDLE EvClientConnected 2026-01-10T12:13:26.310738Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:26.316710Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704930302250633:2839] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2026-01-10T12:13:26.316774Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704930302250633:2839] txid# 281474976715659 SEND to# [59:7593704930302250632:2306] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2026-01-10T12:13:26.402259Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:34.385353Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6qxwfcj53g9t1jw091gj", Request deadline has expired for 3.056235s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7323 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+3148 (0x1AD3879C) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7F6543A4CD90) __libc_start_main+128 (0x7F6543A4CE40) _start+41 (0x1871E029) |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [FAIL] >> TBackupCollectionTests::OmitIndexesFlag [GOOD] >> TBackupCollectionTests::IndexCdcStreamCountRotation >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> GenericFederatedQuery::IcebergHadoopTokenSelectAll >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:13:29.199459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:29.389474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:29.398467Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:29.399280Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:29.399500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:29.918242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:30.118516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:30.118746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:30.189558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:30.358959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:31.594342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:31.599767Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:31.599841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:31.599879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:31.600627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:31.680888Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:32.438626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:13:36.632301Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:36.643383Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:13:36.647922Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:36.694468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:13:36.694622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:36.751287Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:36.754413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:37.067514Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:37.067666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:37.069568Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.070574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.071251Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.072314Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.072476Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.072596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.072738Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.072963Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.073119Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:13:37.092060Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:37.478836Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:37.511635Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:13:37.511780Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:13:37.599159Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:13:37.600930Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:13:37.601229Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:13:37.601303Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:13:37.601389Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:13:37.601512Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:13:37.601577Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:13:37.601646Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:13:37.602264Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:13:37.632193Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:37.632352Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:13:37.670370Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2194:2639] 2026-01-10T12:13:37.670741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2194:2639], schemeshard id = 72075186224037897 2026-01-10T12:13:37.752877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2243:2661] 2026-01-10T12:13:37.759448Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:13:37.795137Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Describe result: PathErrorUnknown 2026-01-10T12:13:37.795226Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Creating table 2026-01-10T12:13:37.795339Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:13:37.807251Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2310:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:13:37.814159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:37.827652Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:13:37.827836Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Subscribe on create table tx: 281474976720657 2026-01-10T12:13:37.872932Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:13:38.218021Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:13:38.382184Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:13:38.775865Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:13:38.912952Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:13:38.913075Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2667] Owner: [2:2249:2666]. Column diff is empty, finishing 2026-01-10T12:13:39.636858Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:39.954917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2746:3372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:39.955159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:39.955945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2764:3377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:39.956138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:39.976196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:40.733340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3047:3425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.733766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.734419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3051:3428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.734569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.735872Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3054:3431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:40.736067Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:13:40.736198Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3056:3433] 2026-01-10T12:13:40.736290Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3056:3433] 2026-01-10T12:13:40.736997Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3057:3184] 2026-01-10T12:13:40.737379Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3056:3433], server id = [2:3057:3184], tablet id = 72075186224037894, status = OK 2026-01-10T12:13:40.737594Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3057:3184], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:13:40.737693Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:13:40.737976Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:13:40.738057Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3054:3431], StatRequests.size() = 1 2026-01-10T12:13:40.762711Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:13:40.763309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3061:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.763518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.764279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3065:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.764452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3068:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.764534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:40.773286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:13:40.912718Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:13:40.912819Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:13:40.995719Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3056:3433], schemeshard count = 1 2026-01-10T12:13:41.282759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3070:3446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-10T12:13:41.487127Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3175:3515] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:13:41.506129Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3198:3531]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:13:41.506363Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:13:41.506407Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3198:3531], StatRequests.size() = 1 2026-01-10T12:13:42.018316Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3276:3561], for# user@builtin, access# DescribeSchema 2026-01-10T12:13:42.018397Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3276:3561], for# user@builtin, access# DescribeSchema 2026-01-10T12:13:42.038574Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:3266:3557], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:42.041707Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OTFjMThlMWItOTA4YzgwNzMtZTExNzhmOWUtMTY4YjQxZA==, ActorId: [1:3257:3549], ActorState: ExecuteState, LegacyTraceId: 01kekx776g7hr9dzx231e1n49y, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/Database/.metadata/_statistics]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> KqpScan::ScanRetryReadRanges [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false |88.5%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 22710, msgbus: 28647 2026-01-10T12:09:43.874321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703971348735449:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:43.874411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:43.953748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:44.023056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:44.541685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.834732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:44.834841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:44.875978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:44.903670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:44.917237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.933337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:45.255930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:45.255983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:45.255998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:45.256126Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:09:45.916390Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703971348735535:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:45.916453Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703979938670969:2749] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:45.916863Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703979938670969:2749] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.016031Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703979938670969:2749] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:46.058448Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703979938670969:2749] Handle TEvDescribeSchemeResult Forward to# [1:7593703979938670968:2748] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:46.069428Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703971348735535:2118] Handle TEvProposeTransaction 2026-01-10T12:09:46.069466Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703971348735535:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:09:46.069571Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703971348735535:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593703984233638273:2757] 2026-01-10T12:09:46.266264Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703984233638273:2757] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.266364Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703984233638273:2757] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.266396Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703984233638273:2757] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.266506Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703984233638273:2757] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.266963Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703984233638273:2757] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.267137Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703984233638273:2757] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:46.267252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703984233638273:2757] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:09:46.267424Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703984233638273:2757] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:09:46.268265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:46.277570Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703984233638273:2757] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:09:46.277642Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703984233638273:2757] txid# 281474976710657 SEND to# [1:7593703984233638272:2756] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:09:46.300888Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703971348735535:2118] Handle TEvProposeTransaction 2026-01-10T12:09:46.300932Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703971348735535:2118] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:09:46.300969Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703971348735535:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7593703984233638313:2788] 2026-01-10T12:09:46.304066Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703984233638313:2788] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** 
(B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.304427Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703984233638313:2788] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.304446Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703984233638313:2788] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.304501Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703984233638313:2788] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.304802Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703984233638313:2788] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.304909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703984233638313:2788] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:46.304964Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703984233638313:2788] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:09:46.305091Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703984233638313:2788] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:09:46.305738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:46.311568Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703984233638313:2788] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess ... 
scribe.cpp:446: Actor# [59:7593704945860373119:2767] Handle TEvDescribeSchemeResult Forward to# [59:7593704945860373118:2766] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:13:30.907844Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704945860372050:2096] Handle TEvProposeTransaction 2026-01-10T12:13:30.907874Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704945860372050:2096] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:13:30.907927Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704945860372050:2096] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [59:7593704945860373124:2772] 2026-01-10T12:13:30.910980Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704945860373124:2772] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:30.911055Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704945860373124:2772] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:30.911079Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704945860373124:2772] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:30.911140Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704945860373124:2772] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:30.911525Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# 
[59:7593704945860373124:2772] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:30.911618Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704945860373124:2772] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:30.911709Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704945860373124:2772] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:13:30.911869Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704945860373124:2772] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:13:30.916775Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:30.925679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704945860373124:2772] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:13:30.925752Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704945860373124:2772] txid# 281474976710657 SEND to# [59:7593704945860373123:2771] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:13:30.988682Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704945860372050:2096] Handle TEvProposeTransaction 2026-01-10T12:13:30.988720Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704945860372050:2096] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:13:30.988769Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704945860372050:2096] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7593704945860373160:2805] 2026-01-10T12:13:30.992305Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704945860373160:2805] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:30.992395Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704945860373160:2805] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:30.992434Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704945860373160:2805] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:30.992511Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704945860373160:2805] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:30.992923Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704945860373160:2805] txid# 281474976710658 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:30.993074Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704945860373160:2805] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:30.993165Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704945860373160:2805] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:13:30.993338Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704945860373160:2805] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:13:30.994104Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:31.015327Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704945860373160:2805] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-10T12:13:31.027833Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704945860373160:2805] txid# 281474976710658 SEND to# [59:7593704945860373159:2804] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-10T12:13:31.096251Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:35.075516Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593704945860372044:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:35.075586Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:36.695752Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6wk1fp6c17w36b4kspnw", Request deadline has expired for 0.641635s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23028 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD38227) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7F956BA93D90) __libc_start_main+128 (0x7F956BA93E40) _start+41 (0x1871E029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [FAIL] Test command err: Starting YDB, grpc: 63703, msgbus: 11591 2026-01-10T12:09:44.609307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703977474364366:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:44.609369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:44.785663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:45.262690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.690264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:45.690399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:45.712524Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:45.775479Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:45.785985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.786098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:45.789870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:46.338085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:46.338107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:46.338117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:46.338204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:46.346167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:46.854214Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703977474364582:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:46.854264Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703986064300015:2749] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:46.854556Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703986064300015:2749] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.891105Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703986064300015:2749] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:46.898508Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703986064300015:2749] Handle TEvDescribeSchemeResult Forward to# [1:7593703986064300014:2748] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 
1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:46.905373Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703977474364582:2118] Handle TEvProposeTransaction 2026-01-10T12:09:46.905400Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703977474364582:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:09:46.905485Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703977474364582:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593703986064300020:2754] 2026-01-10T12:09:47.067288Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703986064300020:2754] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.067773Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703986064300020:2754] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.067807Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703986064300020:2754] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.067876Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703986064300020:2754] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.080664Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703986064300020:2754] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.080971Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703986064300020:2754] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:47.081102Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703986064300020:2754] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:09:47.081272Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703986064300020:2754] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:09:47.082072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:47.086817Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703986064300020:2754] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:09:47.086885Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703986064300020:2754] txid# 281474976710657 SEND to# [1:7593703986064300019:2753] Source {TEvProposeTransactionStatus 
txid# 281474976710657 Status# 53} 2026-01-10T12:09:47.111147Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703977474364582:2118] Handle TEvProposeTransaction 2026-01-10T12:09:47.111184Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703977474364582:2118] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:09:47.111214Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703977474364582:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7593703990359267360:2789] 2026-01-10T12:09:47.114258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703990359267360:2789] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:47.114319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703990359267360:2789] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:47.114338Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703990359267360:2789] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:47.114397Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703990359267360:2789] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:47.114728Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703990359267360:2789] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:47.114842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703990359267360:2789] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:47.114896Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703990359267360:2789] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:09:47.115031Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703990359267360:2789] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:09:47.115649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/ ... 
: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:13:31.992858Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704951681664704:2114] Handle TEvProposeTransaction 2026-01-10T12:13:31.992903Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704951681664704:2114] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:13:31.992966Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704951681664704:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7593704951681665634:2743] 2026-01-10T12:13:31.996834Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704951681665634:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:31.996944Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704951681665634:2743] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:31.996974Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704951681665634:2743] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:31.997055Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704951681665634:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:31.997470Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704951681665634:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:31.997581Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704951681665634:2743] HANDLE EvNavigateKeySetResult, 
txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:32.004310Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704951681665634:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:13:32.004583Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704951681665634:2743] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:13:32.005434Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:32.009713Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704951681665634:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:13:32.009795Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704951681665634:2743] txid# 281474976715657 SEND to# [59:7593704951681665633:2742] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:13:32.016128Z node 59 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:13:32.060090Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704951681664704:2114] Handle TEvProposeTransaction 2026-01-10T12:13:32.060134Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704951681664704:2114] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:13:32.060180Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704951681664704:2114] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7593704955976632987:2779] 2026-01-10T12:13:32.063702Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704955976632987:2779] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:32.063812Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704955976632987:2779] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:32.063839Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704955976632987:2779] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:32.063913Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704955976632987:2779] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:32.078567Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704955976632987:2779] txid# 281474976715658 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:32.078763Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704955976632987:2779] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:32.078864Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704955976632987:2779] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:13:32.079063Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704955976632987:2779] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:13:32.079908Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:32.090975Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704955976632987:2779] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:13:32.091054Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704955976632987:2779] txid# 281474976715658 SEND to# [59:7593704955976632986:2778] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:13:32.328806Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:36.267672Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593704951681664614:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:36.267756Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:37.668459Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6xmkc1bezf793zmrxs18", Request deadline has expired for 0.541322s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10236 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD38227) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7F968E54FD90) __libc_start_main+128 (0x7F968E54FE40) _start+41 (0x1871E029) |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 22728, msgbus: 21625 2026-01-10T12:09:44.668819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703975228878892:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:44.669680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:45.294749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.576154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:45.576270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:45.667800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:45.684219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:45.684319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:45.692842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:45.697874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:46.036732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:46.073794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:46.073818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:46.073824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:46.073905Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:46.441949Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703975228879113:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:46.442037Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703983818814542:2754] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:46.442457Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703983818814542:2754] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.587287Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703983818814542:2754] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:46.604695Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703983818814542:2754] Handle TEvDescribeSchemeResult Forward to# [1:7593703983818814541:2753] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 
10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:46.620336Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703975228879113:2118] Handle TEvProposeTransaction 2026-01-10T12:09:46.620362Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703975228879113:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:09:46.620431Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703975228879113:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593703983818814548:2760] 2026-01-10T12:09:46.774249Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703983818814548:2760] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.774336Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703983818814548:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.774370Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703983818814548:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.774437Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703983818814548:2760] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.774757Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703983818814548:2760] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.774897Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703983818814548:2760] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:46.774975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703983818814548:2760] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:09:46.775112Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703983818814548:2760] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:09:46.775853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:46.789394Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703983818814548:2760] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:09:46.789463Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# 
[1:7593703983818814548:2760] txid# 281474976710657 SEND to# [1:7593703983818814547:2759] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:09:46.833328Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703975228879113:2118] Handle TEvProposeTransaction 2026-01-10T12:09:46.833355Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703975228879113:2118] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:09:46.833412Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703975228879113:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7593703983818814594:2796] 2026-01-10T12:09:46.835146Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703983818814594:2796] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:46.835210Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703983818814594:2796] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:46.835226Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703983818814594:2796] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:46.835274Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703983818814594:2796] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:46.835522Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703983818814594:2796] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:46.835598Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703983818814594:2796] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:46.835635Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703983818814594:2796] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:09:46.835717Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703983818814594:2796] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:09:46.836245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:46.841251Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703983818814594:2796] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResul ... 
Y DEBUG: describe.cpp:270: Actor# [59:7593704951421755262:2754] HANDLE EvNavigateScheme dc-1 2026-01-10T12:13:31.905737Z node 59 :TX_PROXY DEBUG: describe.cpp:354: Actor# [59:7593704951421755262:2754] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:31.905859Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7593704951421755262:2754] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:13:31.906843Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7593704951421755262:2754] Handle TEvDescribeSchemeResult Forward to# [59:7593704951421755261:2753] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:13:31.909785Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704947126786924:2112] Handle TEvProposeTransaction 2026-01-10T12:13:31.909818Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704947126786924:2112] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:13:31.909898Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704947126786924:2112] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [59:7593704951421755267:2759] 2026-01-10T12:13:31.913338Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704951421755267:2759] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:31.913440Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: 
Actor# [59:7593704951421755267:2759] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:31.913468Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704951421755267:2759] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:31.913542Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704951421755267:2759] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:31.913973Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704951421755267:2759] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:31.914087Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704951421755267:2759] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:31.914198Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704951421755267:2759] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:13:31.914376Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704951421755267:2759] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:13:31.915490Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:31.918630Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704951421755267:2759] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:13:31.918701Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704951421755267:2759] txid# 281474976710657 SEND to# [59:7593704951421755266:2758] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:13:31.948599Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704947126786924:2112] Handle TEvProposeTransaction 2026-01-10T12:13:31.948633Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704947126786924:2112] TxId# 281474976710658 ProcessProposeTransaction 2026-01-10T12:13:31.948676Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704947126786924:2112] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7593704951421755302:2791] 2026-01-10T12:13:31.953796Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704951421755302:2791] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:31.953882Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704951421755302:2791] txid# 281474976710658 Bootstrap, UserSID: 
root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:31.953910Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704951421755302:2791] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:31.953978Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704951421755302:2791] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:31.954361Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704951421755302:2791] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:31.954500Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704951421755302:2791] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:31.954672Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704951421755302:2791] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:13:31.954843Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704951421755302:2791] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:13:31.955658Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:31.961041Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704951421755302:2791] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-10T12:13:31.961126Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704951421755302:2791] txid# 281474976710658 SEND to# [59:7593704951421755301:2790] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-10T12:13:37.497380Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx6xfqbf4ejer3cw8ewxm9", Request deadline has expired for 0.515264s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24748 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD38227) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7FD600BC7D90) __libc_start_main+128 (0x7FD600BC7E40) _start+41 (0x1871E029) >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TUrlMatcherTest::MatchRecursive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:13:22.696883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:22.697270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:22.936819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:22.938135Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:22.946223Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:22.946651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:22.946871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:22.948707Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:22.949097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:22.949295Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:23.524025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:23.648955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:23.649095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:23.649934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:23.650002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:23.710715Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:23.711749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:23.724708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:23.852615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:23.867130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:24.687138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:24.772832Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:24.772973Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:25.167523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:26.099059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1725:3108], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.099219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1735:3113], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.099301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.100478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1740:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.109200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:26.127619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:13:26.653902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1739:3116], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:13:26.831761Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1833:3182] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 2 -- EvScan [1:1912:3235] -> [2:1867:2437] -- EvScanData from [2:1914:2444]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976715661},"Finished":true} 2026-01-10T12:13:30.210604Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down ... waiting for SysViewsRoster update finished 2026-01-10T12:13:39.787328Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:39.787696Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:39.799453Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:39.801534Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:39.803148Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:39.803555Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:39.803770Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:39.806161Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:39.806430Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:39.806629Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:40.321654Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:40.472255Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:40.472511Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:40.473363Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:40.473467Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:40.551090Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:13:40.552684Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:40.553170Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:40.626608Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:40.686998Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:41.435250Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:41.511898Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:41.512036Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:41.942290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:42.658120Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1725:3109], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:42.658248Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1735:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:42.658324Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:42.659440Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1740:3118], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:42.659636Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:42.673813Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:13:43.301744Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1739:3117], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:13:43.425885Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1834:3184] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 4 -- EvScan [3:1913:3237] -> [4:1868:2436] -- EvScanData from [4:1915:2443]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2026-01-10T12:13:45.697723Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> ColumnShardTiers::TieringUsage [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:12:17.221894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:12:17.222001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:17.222046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:12:17.222084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:12:17.222169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:12:17.222209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:12:17.222292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:17.222372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:12:17.223299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:12:17.223619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:12:17.328109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:17.328189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:17.348824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:12:17.348963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:12:17.349173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:12:17.367391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:12:17.367650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:12:17.368324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:12:17.368550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:12:17.371687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:17.371894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:12:17.373039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:12:17.373103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:17.373168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-10T12:12:17.373208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:12:17.373251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:12:17.373483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:12:17.532731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.533829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.533980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.534978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... p:84: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:46.214801Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:13:46.218107Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-10T12:13:46.218292Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:13:46.218348Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710693:0 from tablet: 72057594046678944 to tablet: 72075186233409554 cookie: 72057594046678944:9 msg type: 269549568 2026-01-10T12:13:46.218501Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710693, partId: 0, tablet: 72075186233409554 2026-01-10T12:13:46.220762Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-10T12:13:46.220901Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:13:46.220938Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710694:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:12 msg type: 269549568 2026-01-10T12:13:46.221070Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710694, partId: 0, tablet: 72075186233409555 TestWaitNotification: OK eventTxId 108 2026-01-10T12:13:46.222500Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2618:4237], Recipient [25:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:13:46.222648Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:13:46.222862Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:46.223297Z node 25 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/CoverTable" took 445us result status StatusSuccess 2026-01-10T12:13:46.224056Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable" PathDescription { Self { Name: "CoverTable" PathId: 65 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 1 PathState: EPathStateIncomingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "CoverTable" Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "age" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "ega" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "id" KeyColumnIds: 1 TableIndexes { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 65 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:46.225471Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2619:4238], Recipient [25:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: true } 2026-01-10T12:13:46.225582Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:13:46.225800Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:13:46.226308Z node 25 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CoverTable/idx_name_age" took 526us result status StatusSuccess 2026-01-10T12:13:46.227357Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable/idx_name_age" PathDescription { Self { Name: "idx_name_age" PathId: 66 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 65 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 67 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 66 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 
134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } FollowerGroups { } } } } } PathId: 66 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 6609, MsgBus: 20523 2026-01-10T12:11:54.468466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704534383807372:2216];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:54.468524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:55.319932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:55.349101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:55.349208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:55.495077Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:55.512172Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704534383807180:2081] 1768047114384080 != 1768047114384083 2026-01-10T12:11:55.521722Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:55.552055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:55.593575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:55.908803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:55.908829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:55.908837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:55.908940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:56.649782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:59.468384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704534383807372:2216];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:59.468480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:11:59.760453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704555858644571:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.760519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704555858644560:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.760648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.762342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704555858644575:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.762477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:59.765313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:59.784324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704555858644574:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:59.975672Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704555858644627:2545] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:00.364922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:12:00.587891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:12:00.590129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:12:00.590394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:12:00.590503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:12:00.590624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:12:00.590732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:12:00.590842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:12:00.590941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:12:00.591037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:12:00.591133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:12:00.591245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:12:00.591338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:12:00.591452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7593704560153612097:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:12:00.624693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:12:00.624825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:12:00.625058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:12:00.625198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:12:00.625324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:12:00.625462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:12:00.625593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593704560153612098:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:12:00.625702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:75937045 ... 
10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.349438Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.353061Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.353173Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.353203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.365028Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.365155Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.365185Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.378009Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.378095Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.378123Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.396835Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.396928Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.396982Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.412101Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.412192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.412216Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.423085Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.423180Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.423210Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.434336Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.434419Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.434446Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.444957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.445035Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.445059Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.456214Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.456296Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.456324Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.467767Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.467855Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.467882Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.483155Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.484163Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.484208Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.495451Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.495546Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.495572Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.506926Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.507009Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.507036Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:13:34.623789Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekx68145gvdm7kvca4eh98h", SessionId: ydb://session/3?node_id=4&id=MzNkYjU1ODItZTIxZTFiZmYtMzc0ZGNkMTQtODk0MTY0MjA=, Slow query, duration: 18.536104s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.5%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] |88.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:38.280390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:38.280472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.280508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:38.280537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:38.280566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:38.280593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:38.280644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.280701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:38.281414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:38.281665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:38.383035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:38.383123Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.402624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:38.402775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:38.403007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:38.412494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:38.412784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:38.413546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.413811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:38.416511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.416745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:38.418131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.418197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.418292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:38.418345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:38.418387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:38.418674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.592499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.593980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.594566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:13:38.594665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... xId: 103 ready parts: 1/1 2026-01-10T12:13:49.132345Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:13:49.132394Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:13:49.132589Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:13:49.133480Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:49.133861Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:49.134219Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-10T12:13:49.134392Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:13:49.134713Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:13:49.135028Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:13:49.135229Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:49.135462Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-10T12:13:49.135779Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-10T12:13:49.135914Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at 
schemeshard: 72057594046678944 2026-01-10T12:13:49.136105Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:13:49.136407Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-10T12:13:49.136705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:13:49.136905Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:49.137182Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:49.137253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:49.137401Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:49.137808Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:49.137877Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:49.137971Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:49.147103Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:13:49.147191Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-10T12:13:49.147491Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:49.147530Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:49.148053Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:13:49.148111Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-10T12:13:49.148249Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:49.148301Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 
72075186234409546 2026-01-10T12:13:49.148599Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:49.148963Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:13:49.149355Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:13:49.149414Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:13:49.149942Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:13:49.150056Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:13:49.150114Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:597:2539] TestWaitNotification: OK eventTxId 103 2026-01-10T12:13:49.150805Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:49.151035Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 270us result status StatusPathDoesNotExist 2026-01-10T12:13:49.151224Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:49.151815Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:49.152036Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess 2026-01-10T12:13:49.152593Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 
SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:49.153622Z node 6 :HIVE INFO: tablet_helpers.cpp:1586: [72057594037968897] TEvRequestHiveInfo, msg: |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:37.704330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:37.704492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:37.704547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:37.704587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:37.704631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:37.704660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:37.704756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:37.704845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:37.705781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:37.706110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:37.819035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:37.819117Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:37.840395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:37.840542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:37.840796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:37.850446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:37.850742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:37.851633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:37.851921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:37.854718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:37.854946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:37.856428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:37.856489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:37.856563Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:37.856614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:37.856659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:37.856929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:37.865177Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:38.025247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:38.025549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.025853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:38.025911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:38.026215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:38.026285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:38.029577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.029853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:38.030160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.030264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:38.030314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:38.030353Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:38.033042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.033126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:38.033181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:38.035202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.035258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.035319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:38.035409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:38.039498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:38.041641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:38.041860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:38.043128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.043286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:38.043342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:38.043699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:38.043759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:38.043939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:38.044030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:38.046359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.046409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 02245Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:13:50.006392Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:13:50.006545Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:50.006590Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:50.006621Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:50.006649Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:50.007002Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:13:50.007050Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:13:50.007199Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:50.007244Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:50.007294Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:50.007344Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:50.007400Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:13:50.007457Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:50.007511Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:13:50.007553Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:13:50.007766Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:13:50.010405Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:50.010955Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:50.011184Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2026-01-10T12:13:50.012674Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:13:50.012883Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:13:50.013217Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:13:50.013675Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:13:50.013913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:50.014144Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:13:50.014662Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2026-01-10T12:13:50.023076Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:50.027498Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:13:50.027800Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:13:50.032502Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:13:50.032772Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2026-01-10T12:13:50.036903Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:50.036982Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:50.037145Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:50.038504Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:50.038566Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:50.038656Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:50.040399Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:13:50.040489Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:13:50.040612Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:50.040641Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:50.042616Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:13:50.042663Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:13:50.042746Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:50.042789Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:13:50.043265Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:50.044345Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:13:50.044649Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:13:50.044702Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:13:50.045229Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:13:50.045337Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:13:50.045391Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:573:2515] TestWaitNotification: OK eventTxId 103 2026-01-10T12:13:50.045918Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:50.046123Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 249us result status StatusPathDoesNotExist 2026-01-10T12:13:50.046327Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:10:46.913023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:10:47.054285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:10:47.066944Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:10:47.067469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:10:47.067582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:10:47.521677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:10:47.521858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:10:47.529288Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047043275851 != 1768047043275854 2026-01-10T12:10:47.548658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:47.602037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:10:47.737633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:10:48.401870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:48.402644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:10:48.402707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:10:48.402778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:10:48.403473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:10:48.418728Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-10T12:10:58.821155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:58.821376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:58.821918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:58.822055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:58.826139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:10:59.110684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1018:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:59.110823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:59.111342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:59.111476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1025:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:59.111543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:10:59.118084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:10:59.489131Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1027:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:10:59.582236Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1118:2918] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:00.196784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:00.684721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:01.370388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:02.171462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:02.600146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:11:03.146189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:04.015295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", 
AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-10T12:11:19.450928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA S ... ine=abstract.cpp:13;event=new_stage;stage=Written;task_id=d1b90210-ee1d11f0-bad6b1e7-3792d5ea; 2026-01-10T12:13:48.863420Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=133728;raw_bytes=3802392;count=3;records=3148} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037892 2026-01-10T12:13:48.863866Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadBlobs;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.864593Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-10T12:13:48.865193Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.865387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.865684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.872236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2975:4348];task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;task_class=CS::GENERAL;fline=general_compaction.cpp:138;event=blobs_created_diff;appended=0;;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:192];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:192:232];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:424:256];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:680:192];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:872:264];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1520:192];;;;switched=(portion_id:20;path_id:1000000894;records_count:1;schema_version:2;level:0;cs:plan_step=1759408950500;tx_id=18446744073709551615;;wi:10;;column_size:1328;index_size:0;meta:(()););(portion_id:19;path_id:1000000894;records_count:1;schema_version:2;level:0;;column_size:1712;index_size:0;meta:(()););; 2026-01-10T12:13:48.872375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2975:4348];task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2026-01-10T12:13:48.872437Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037892;parent_id=[1:2975:4348];task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.872718Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2026-01-10T12:13:48.872860Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:63;event=Limiter; 2026-01-10T12:13:48.872915Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=AskDiskQuota;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.872972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Writing;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.873579Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 72075186224037892 2026-01-10T12:13:48.873785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=70;sum=2726;count=57; 2026-01-10T12:13:48.873853Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:66;memory_size=190;data_size=182;sum=5510;count=58;size_of_meta=112; 2026-01-10T12:13:48.873920Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:44;memory_size=270;data_size=262;sum=7830;count=29;size_of_portion=192; 2026-01-10T12:13:48.874003Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.874360Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[49] (CS::GENERAL) apply at tablet 72075186224037892 2026-01-10T12:13:48.875396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:2975:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037892;external_task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; 2026-01-10T12:13:48.875455Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 72075186224037892 Save Batch GenStep: 1:17 Blob count: 1 2026-01-10T12:13:48.875582Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=133728;raw_bytes=3802392;count=3;records=3148} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037892 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=51;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=53;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=54;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=50;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=52;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=d1b90210-ee1d11f0-bad6b1e7-3792d5ea; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=560;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=6240;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=5680;delta=560; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=d1b9ab0c-ee1d11f0-b129d5a8-6bac3126; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 
:Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> KqpPg::TableDeleteWhere-useSink [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |88.6%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectAll |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> Compression::WriteRAW [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> Compression::WriteGZIP >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll >> TBackupCollectionTests::IndexCdcStreamCountRotation [GOOD] >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:38.771993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:38.772111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.772154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:38.772192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:38.772250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:38.772304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-01-10T12:13:38.772372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.772463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:38.773523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:38.773879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:38.869121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:38.869190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.886036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:38.886160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:38.886348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:38.896152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:38.896508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:38.897313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.897543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:38.900168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.900364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:38.901598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.901672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.901757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:38.901804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:38.901852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:38.902112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:39.097259Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.102919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:39.103608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
onId 103:0, ProgressState, NeedSyncHive: 0 2026-01-10T12:13:52.875436Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-10T12:13:52.876527Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:13:52.876658Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:13:52.876716Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:13:52.876771Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2026-01-10T12:13:52.876831Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:13:52.876942Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:13:52.879889Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:13:52.879941Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:13:52.880101Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:13:52.880140Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:556:2494], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-10T12:13:52.880637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 3 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-10T12:13:52.880736Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:52.880852Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 3, ActorId:[8:506:2455], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 
18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:52.881913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:13:52.881977Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:13:52.882151Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:52.882208Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:52.882262Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:52.882316Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:52.882370Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:13:52.882426Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:52.882500Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:13:52.882546Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:13:52.882644Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:13:52.883110Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 0 2026-01-10T12:13:52.883485Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:52.883567Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:13:52.885358Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:13:52.885441Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:13:52.885995Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:13:52.886120Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:13:52.886183Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 
103: satisfy waiter [8:599:2533] TestWaitNotification: OK eventTxId 103 2026-01-10T12:13:52.886872Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:52.887108Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 268us result status StatusSuccess 2026-01-10T12:13:52.887630Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 5 ShardsInside: 3 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 5 MaxChildrenInDir: 3 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 7 MaxShardsInPath: 3 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:52.888694Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-10T12:13:52.889044Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 234us result status StatusSuccess 2026-01-10T12:13:52.889595Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 
0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10 ShardsInside: 3 ShardsLimit: 10 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10 MaxChildrenInDir: 10 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 10 MaxShardsInPath: 10 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [FAIL] Test command err: Starting YDB, grpc: 22476, msgbus: 26910 2026-01-10T12:09:43.876672Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005394s 2026-01-10T12:09:43.883419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703972633362241:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:43.888862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:44.208211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.242470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:44.242634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:44.283690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:44.351310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:44.396176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.600736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:44.600762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:44.600770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:44.600852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:44.890174Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:44.985263Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703972633362396:2117] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:44.985327Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703976928330565:2784] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:44.985682Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703976928330565:2784] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.022894Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703976928330565:2784] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:45.029617Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703976928330565:2784] Handle TEvDescribeSchemeResult Forward to# [1:7593703976928330564:2783] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:45.033627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703972633362396:2117] Handle 
TEvProposeTransaction 2026-01-10T12:09:45.033661Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703972633362396:2117] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:45.033723Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703972633362396:2117] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593703981223297869:2789] 2026-01-10T12:09:45.176276Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703981223297869:2789] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.176373Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703981223297869:2789] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.176392Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703981223297869:2789] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.176458Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703981223297869:2789] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.176753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703981223297869:2789] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.176851Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703981223297869:2789] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:45.176924Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703981223297869:2789] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:45.177064Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703981223297869:2789] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:45.177716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:45.185714Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703981223297869:2789] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:45.185786Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703981223297869:2789] txid# 281474976715657 SEND to# [1:7593703981223297868:2788] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:45.200065Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703972633362396:2117] Handle TEvProposeTransaction 2026-01-10T12:09:45.200105Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# 
[1:7593703972633362396:2117] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:09:45.200133Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703972633362396:2117] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7593703981223297905:2822] 2026-01-10T12:09:45.202557Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703981223297905:2822] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.202599Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703981223297905:2822] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.202612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703981223297905:2822] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.202732Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703981223297905:2822] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.202985Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703981223297905:2822] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.203070Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703981223297905:2822] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:45.203155Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703981223297905:2822] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:09:45.203284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703981223297905:2822] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:09:45.203810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:45.208769Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703981223297905:2822] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:09:45.208815Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703981223297905:2822] txid# 281474976715658 SEND to# [1:7593703981223297904:2821] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:09:48.164424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.c ... 
hemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:13:38.858422Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7593704979919077431:2729] Handle TEvDescribeSchemeResult Forward to# [59:7593704979919077430:2728] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:13:38.868955Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704975624109196:2118] Handle TEvProposeTransaction 2026-01-10T12:13:38.868993Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704975624109196:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:13:38.869056Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704975624109196:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7593704979919077437:2735] 2026-01-10T12:13:38.874500Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704979919077437:2735] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:38.874600Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704979919077437:2735] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:38.874636Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704979919077437:2735] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:38.874713Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# 
[59:7593704979919077437:2735] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:38.875143Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704979919077437:2735] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:38.875243Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704979919077437:2735] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:13:38.875347Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704979919077437:2735] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:13:38.875532Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704979919077437:2735] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:13:38.876315Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:38.879455Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704979919077437:2735] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:13:38.879523Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704979919077437:2735] txid# 281474976715657 SEND to# [59:7593704979919077435:2733] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:13:38.920356Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7593704975624109196:2118] Handle TEvProposeTransaction 2026-01-10T12:13:38.920393Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7593704975624109196:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-10T12:13:38.920450Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7593704975624109196:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7593704979919077472:2767] 2026-01-10T12:13:38.923962Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7593704979919077472:2767] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:13:38.924062Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7593704979919077472:2767] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-10T12:13:38.924250Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7593704979919077472:2767] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:13:38.924338Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7593704979919077472:2767] txid# 281474976715658 TEvNavigateKeySet requested from 
SchemeCache 2026-01-10T12:13:38.924799Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7593704979919077472:2767] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:38.924939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7593704979919077472:2767] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:38.925028Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7593704979919077472:2767] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:13:38.925212Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7593704979919077472:2767] txid# 281474976715658 HANDLE EvClientConnected 2026-01-10T12:13:38.925986Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:38.932217Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7593704979919077472:2767] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-10T12:13:38.932294Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7593704979919077472:2767] txid# 281474976715658 SEND to# [59:7593704979919077471:2766] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-10T12:13:42.596234Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593704975624109124:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:42.596348Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:44.050023Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx749j5wxpr8wmvyq083kf", Request deadline has expired for 0.096903s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14953 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD38227) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD7BC16) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD69688) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD6878D) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7F2AFAE2AD90) __libc_start_main+128 (0x7F2AFAE2AE40) _start+41 (0x1871E029) >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:40.361776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:40.361916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:40.361968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:40.362007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:40.362054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:40.362081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:40.362158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:40.362224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:40.363134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:40.363493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:40.464875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:40.464956Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:40.488277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:40.488407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:40.488600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:40.497824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:40.498086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:40.498897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:40.499174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:40.501979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:40.502214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:40.503507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:40.503571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:40.503636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:40.503684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:40.503723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:40.503983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.511898Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:40.660294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:40.660655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.660901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:40.660956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:40.661221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:40.661311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:40.664201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:40.664447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:40.664690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.664775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:40.664856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:40.664895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:40.667652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.667718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:40.667767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:40.669951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.670032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.670085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2026-01-10T12:13:40.670147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:40.674357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:40.676890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:40.677130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:40.678397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:40.678550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:40.678605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:40.678993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:40.679053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:40.679248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:40.679363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:40.682004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:40.682083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
6545 cookie: 0:108 msg type: 269090816 2026-01-10T12:13:53.143541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000041 2026-01-10T12:13:53.144765Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:53.144932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:53.144996Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-10T12:13:53.145266Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-10T12:13:53.145359Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-10T12:13:53.145597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:13:53.145688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:13:53.145768Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:13:53.150997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-10T12:13:53.151446Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2026-01-10T12:13:53.157254Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:53.157307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:53.157511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:13:53.157624Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:53.157664Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard 
Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-10T12:13:53.157716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 108, path id: 41 2026-01-10T12:13:53.158190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:13:53.158247Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-10T12:13:53.158410Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:13:53.158460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:13:53.158514Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:13:53.158569Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:13:53.158630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-10T12:13:53.158685Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:13:53.158745Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-10T12:13:53.158790Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-10T12:13:53.158891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:13:53.158950Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2026-01-10T12:13:53.158994Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:13:53.159031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-10T12:13:53.160439Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:13:53.160547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:13:53.160601Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:13:53.160660Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:13:53.160729Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-10T12:13:53.161993Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:13:53.162087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:13:53.162120Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:13:53.162160Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-10T12:13:53.162196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:13:53.162299Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-10T12:13:53.166762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-10T12:13:53.167269Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-10T12:13:53.167600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-10T12:13:53.167669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-10T12:13:53.168286Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-10T12:13:53.168422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:13:53.168474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:906:2875] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-10T12:13:53.175327Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:53.175622Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2026-01-10T12:13:53.175835Z 
node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:13:53.180209Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges)" TxId: 109 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 108, at schemeshard: 72057594046678944 2026-01-10T12:13:53.180569Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait |88.6%| [TA] $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::DeleteAndRestart >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:38.548620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:38.548711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.548757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:38.548796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:38.548867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:38.548902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:38.548970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.549035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:38.549952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:38.550291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:38.682914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:38.683004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.717646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:38.726292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:38.726707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:38.761087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:38.761298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:38.762108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.762584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:38.780242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.780484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:38.781678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.781749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.781868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:38.781920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-10T12:13:38.781963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:38.782199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.936629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.937642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.937789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.937889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.937962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:13:38.938204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.938719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ], version: 18446744073709551615 2026-01-10T12:13:53.930671Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-10T12:13:53.930749Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:13:53.933795Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:13:53.933951Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:53.934010Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:53.934041Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:53.934072Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:53.934727Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:13:53.934787Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:13:53.934967Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:53.935018Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:53.935080Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:53.935128Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:53.935178Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:13:53.935236Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:53.935288Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:13:53.935335Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:13:53.935563Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:13:53.936900Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 
2026-01-10T12:13:53.937339Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-10T12:13:53.937545Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:13:53.937700Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:13:53.938060Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2026-01-10T12:13:53.939067Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:53.939672Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-10T12:13:53.939842Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:53.940234Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:13:53.940590Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-10T12:13:53.940746Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:13:53.941023Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:13:53.941429Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:13:53.941598Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:53.941828Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: 
TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:53.941895Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:53.942044Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:53.942631Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:53.942699Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:53.942778Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:53.947419Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:13:53.947503Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-10T12:13:53.947624Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:53.947652Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:53.947715Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:13:53.947758Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-10T12:13:53.947835Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:53.947877Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-01-10T12:13:53.947949Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:53.948141Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:13:53.948504Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:13:53.948560Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:13:53.949012Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:13:53.949126Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:13:53.949181Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:580:2522] TestWaitNotification: OK eventTxId 103 2026-01-10T12:13:53.949744Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:53.949958Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 246us result status StatusPathDoesNotExist 2026-01-10T12:13:53.950130Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:42.797252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:42.797381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:42.797432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:42.797474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:42.797522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:42.797554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:42.797662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:42.797759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:42.798680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:42.799026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:42.907960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:42.908015Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:42.920718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:42.925837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:42.926046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:42.933357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:42.933547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:42.934309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:42.934665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:42.938121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:42.938335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:42.939519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:42.939585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:42.939729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:42.939781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:42.939825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-10T12:13:42.940098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:43.109255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.115996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.116062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.116149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.116219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:43.116353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:13:54.137363Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 134 2026-01-10T12:13:54.138343Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:54.139331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:54.140818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.140882Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:54.140995Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 134 -> 135 2026-01-10T12:13:54.141231Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:54.141304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:13:54.142583Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:54.142619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:54.142720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:13:54.143019Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:54.143051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:13:54.143082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-10T12:13:54.143230Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.143284Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2026-01-10T12:13:54.143331Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 135 -> 240 2026-01-10T12:13:54.144326Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:54.144404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:54.144428Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:13:54.144453Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:13:54.144486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:13:54.144872Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:54.144935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:54.144956Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:13:54.144975Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-10T12:13:54.144995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:13:54.145041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:13:54.146966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.147026Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:13:54.147167Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:54.147206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:54.147248Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:54.147281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:54.147423Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:13:54.147485Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:54.147535Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:13:54.147582Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:13:54.147666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:54.148305Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:54.148361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:54.148426Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:54.148705Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:54.148748Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:54.148812Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:54.149487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:54.149842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:54.151438Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:54.151516Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:13:54.151721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:54.151779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:54.152124Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:54.152196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:54.152235Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 
102: satisfy waiter [7:345:2335] TestWaitNotification: OK eventTxId 102 2026-01-10T12:13:54.152668Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:54.152835Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 198us result status StatusPathDoesNotExist 2026-01-10T12:13:54.152968Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:54.530048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:54.530151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:54.530195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:54.530244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:54.530345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:54.530381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:54.530476Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:54.530580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:54.531586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:54.531981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:54.634881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:54.634959Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:54.667851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:54.668006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:54.668280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:54.683185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:54.683491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:54.684343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:54.684631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:54.687296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:54.687524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:54.688974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:54.689060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:54.689143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:54.689193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:54.689238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:54.689490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.698223Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 
0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:54.852014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:54.852285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.852525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:54.852581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:54.852811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:54.852893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:54.857013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:54.857239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:54.857488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.857551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:54.857607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:54.857641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:54.860009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.860098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:54.860164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:54.862578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.862637Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:54.862690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:54.862748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:54.866700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:54.872597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:54.872870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:54.874113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:54.874334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:54.874406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:54.874742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:54.874804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:54.874994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:54.875088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:54.877698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:54.877779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
sh_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409549 2026-01-10T12:13:55.327501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:13:55.327720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:55.328880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:55.328971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:55.329135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:55.329518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:13:55.329570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:13:55.334360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-10T12:13:55.334412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-10T12:13:55.334575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2026-01-10T12:13:55.335225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:55.335351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:55.335381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:55.335736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:55.335811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:55.335900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:55.336765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:13:55.336807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 
2026-01-10T12:13:55.336890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:13:55.336934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:13:55.337212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:55.337245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:13:55.337667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:13:55.337734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:13:55.337834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:55.339961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-10T12:13:55.340260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:13:55.340310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:13:55.340433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:55.340480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:55.341005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:13:55.341137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:13:55.341182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:714:2607] 2026-01-10T12:13:55.341463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:55.341560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:55.341590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:714:2607] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-10T12:13:55.342134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-10T12:13:55.342380Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 271us result status StatusPathDoesNotExist 2026-01-10T12:13:55.342601Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:55.343073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:55.343258Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 199us result status StatusPathDoesNotExist 2026-01-10T12:13:55.343407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:55.343957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:55.348583Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 4.53ms result status StatusSuccess 2026-01-10T12:13:55.349196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:37.266424Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704976850403020:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:37.266500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:37.366591Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.457389Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.967667Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:37.967847Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:38.036172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.190195Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.190315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.339283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.339351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.397821Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.394211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.399976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.424317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.424377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:38.429532Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.474454Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:38.484490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.500192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.832526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.905301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003648/r3tmp/yandexQQfj0y.tmp 2026-01-10T12:13:38.905329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003648/r3tmp/yandexQQfj0y.tmp 2026-01-10T12:13:38.916471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003648/r3tmp/yandexQQfj0y.tmp 2026-01-10T12:13:38.916629Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:38.953524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:39.019655Z INFO: TTestServer started on Port 3668 GrpcPort 18784 PQClient connected to localhost:18784 === TenantModeEnabled() = 1 === Init PQ - start server on 
port 18784 2026-01-10T12:13:39.965258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:39.965455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.965662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:39.965680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:39.965846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:39.965892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:39.993797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.993993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:39.994185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.994226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:39.994245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:39.994259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:13:39.996780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.996808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:13:39.996840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:40.008259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at 
schemeshard: 72057594046644480 2026-01-10T12:13:40.008323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:40.008341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:13:40.017491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:40.017536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:40.017554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:40.017589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:13:40.022326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:40.025695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:13:40.025821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:40.037965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047220072, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:40.038141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047220072 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:40.038171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:40.038811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:13:40.038870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:40.039179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026- ... 
661227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-10T12:13:53.661235Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:53.661252Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-10T12:13:53.661260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:53.661290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-10T12:13:53.661326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2026-01-10T12:13:53.661340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-10T12:13:53.661348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:53.661359Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-10T12:13:53.661368Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2026-01-10T12:13:53.661378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-10T12:13:53.663255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:53.663501Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2026-01-10T12:13:53.663622Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:13:53.663631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-10T12:13:53.663820Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:13:53.663832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7593705018009495841:2390], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 46 2026-01-10T12:13:53.665003Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:53.665120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:53.665134Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2026-01-10T12:13:53.665154Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-10T12:13:53.665170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-10T12:13:53.665261Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2026-01-10T12:13:53.666936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:53.669444Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-10T12:13:53.669471Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-10T12:13:53.670045Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2026-01-10T12:13:53.670147Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:55192 2026-01-10T12:13:53.670190Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:55192 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-10T12:13:53.670199Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:13:53.671546Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-10T12:13:53.671788Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-10T12:13:53.671799Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 
2026-01-10T12:13:53.671808Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:13:53.671843Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593705043779300915:2377] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-10T12:13:53.671866Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:13:53.672490Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-10T12:13:53.672691Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2026-01-10T12:13:53.673092Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 2026-01-10T12:13:53.674298Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-10T12:13:53.674639Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1365: updating token 2026-01-10T12:13:53.674679Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:13:53.675416Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 describe result for acl check 2026-01-10T12:13:53.675516Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 2026-01-10T12:13:53.675777Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group|c0b871d-9735862e-84b009f5-d18be38e_0 is DEAD 2026-01-10T12:13:53.676060Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:13:54.187584Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705048074268230:2386], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:54.190342Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MjVmMTAyOWItYWQ0NDdkMmEtNmQ3YmY4NDQtOWQ0Y2EwODM=, ActorId: [3:7593705048074268223:2382], ActorState: ExecuteState, LegacyTraceId: 01kekx7k3e0wzc742gsmq5d4fk, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:54.190771Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:37.419276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704975923370090:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:37.419636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:37.662245Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.673636Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:38.336829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.336973Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:38.433968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.434123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:38.548272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.548370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.592739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.592847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.665500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.689503Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:38.690226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.720278Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.724382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.727052Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.732377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704975923369961:2082] 1768047217390472 != 1768047217390475 2026-01-10T12:13:38.753628Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.758426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:39.170978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003646/r3tmp/yandexT7bV4E.tmp 2026-01-10T12:13:39.171006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003646/r3tmp/yandexT7bV4E.tmp 2026-01-10T12:13:39.171161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003646/r3tmp/yandexT7bV4E.tmp 2026-01-10T12:13:39.171250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:39.332232Z INFO: TTestServer started on Port 13826 GrpcPort 21384 PQClient connected to localhost:21384 === TenantModeEnabled() = 1 === Init PQ - start server on port 21384 2026-01-10T12:13:39.969649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:39.969864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.970099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:39.970123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:39.970317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:39.970375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:39.972921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.973106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:39.973256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.973297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:39.973316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:39.973327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:13:39.977201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.977251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:39.977274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:13:39.982168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.982188Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:13:39.982203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.982385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.982409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.982422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.982443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:13:39.988064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:39.995852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:13:39.995984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:39.999466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047220044, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.999643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047220044 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:39.999682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.999985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:13:40.000021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:40.000217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:13:40.000260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 
2026-01-10T12:13:40.002739Z node 1 :FLAT_TX_SCHEMESHARD INFO: scheme ... p:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 48] was 4 2026-01-10T12:13:53.012542Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 2026-01-10T12:13:53.012574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7593705042299465221:2360] 2026-01-10T12:13:53.014805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-10T12:13:53.014834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2026-01-10T12:13:53.131085Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-10T12:13:53.131118Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 1 2026-01-10T12:13:53.131965Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2026-01-10T12:13:53.132062Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:42428 2026-01-10T12:13:53.132097Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42428 proto=v1 topic=Root/acc/topic1 durationSec=0 2026-01-10T12:13:53.132106Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:13:53.136126Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 1 sessionId: describe result for acl check 2026-01-10T12:13:53.136406Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-10T12:13:53.136423Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:13:53.136432Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET 
AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:13:53.136474Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593705046594432709:2364] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-10T12:13:53.136508Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:13:53.137309Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-10T12:13:53.137536Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-10T12:13:53.137968Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 2026-01-10T12:13:53.140056Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 grpc read done: success: 0 data: 2026-01-10T12:13:53.140093Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 grpc read failed 2026-01-10T12:13:53.140300Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 1 sessionId: 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 2026-01-10T12:13:53.140314Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 12345678|e0c93b86-d685af05-9dbe97c5-92accd09_0 is DEAD 2026-01-10T12:13:53.140549Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2026-01-10T12:13:53.173555Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-10T12:13:53.173580Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-10T12:13:53.174039Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2026-01-10T12:13:53.174152Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:42428 2026-01-10T12:13:53.174169Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42428 proto=v1 topic=topic1 durationSec=0 2026-01-10T12:13:53.174179Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:13:53.178344Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-10T12:13:53.178503Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 
2026-01-10T12:13:53.178514Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:13:53.178522Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:13:53.178562Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593705046594432729:2373] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-10T12:13:53.178598Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:13:53.184589Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-10T12:13:53.184897Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-10T12:13:53.185402Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 2026-01-10T12:13:53.187329Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 grpc read done: success: 0 data: 2026-01-10T12:13:53.187344Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 grpc read failed 2026-01-10T12:13:53.187374Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 grpc closed 2026-01-10T12:13:53.187390Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 12345678|2e1529c6-e5c4c86e-d468f6b6-e87c7bb4_0 is DEAD 2026-01-10T12:13:53.189435Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:13:53.798480Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705046594432753:2385], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:53.801394Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MTc4MGYxMDQtZDU5NDdjODYtYjNlNjkwZjQtMjRmNGU3YWU=, ActorId: [3:7593705046594432746:2381], ActorState: ExecuteState, LegacyTraceId: 01kekx7jpn7tr36bxaxftbd605, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:53.802040Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:38.496826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:38.496917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.496960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:38.496997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:38.497054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:38.497086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-10T12:13:38.497147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.497220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:38.498481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:38.498789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:38.601411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:38.601472Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.617285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:38.617406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:38.617575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:38.625486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:38.625707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:38.626398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.626652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:38.633076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.633269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:38.634456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.634519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.634585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:38.634632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:38.634675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:38.634947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.833872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.835948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.836011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.836097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.836226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.836310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.836414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
State leave, operationId 103:0, at tablet# 72057594046678944 2026-01-10T12:13:55.845641Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-10T12:13:55.845860Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:55.857146Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:13:55.857382Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-10T12:13:55.857934Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.858122Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 34359740527 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:55.858192Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-10T12:13:55.858600Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-10T12:13:55.858685Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-10T12:13:55.858926Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:13:55.859163Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:588: Send TEvUpdateTenantSchemeShard, to actor: [8:800:2770], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2026-01-10T12:13:55.869527Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6262: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2026-01-10T12:13:55.869725Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2026-01-10T12:13:55.869978Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: 
Erasing txId 103 2026-01-10T12:13:55.870544Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:55.870628Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:13:55.870896Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.870991Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:213:2214], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:13:55.871627Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.871716Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2026-01-10T12:13:55.871789Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-10T12:13:55.872858Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:13:55.873005Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:13:55.873075Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:13:55.873136Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-10T12:13:55.873203Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-10T12:13:55.873323Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:13:55.886157Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-10T12:13:55.886306Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:13:55.886427Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:800:2770], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:13:55.886598Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-10T12:13:55.886638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-10T12:13:55.886785Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-10T12:13:55.886824Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:892:2836], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-10T12:13:55.893157Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409546, cookie: 0 2026-01-10T12:13:55.894021Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.894098Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:13:55.894288Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:55.894346Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:55.894405Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:13:55.894459Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:55.894517Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:13:55.894580Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:13:55.894637Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:13:55.894686Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:13:55.894792Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:13:55.895246Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:13:55.895336Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-10T12:13:55.905560Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:13:55.905650Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:13:55.906333Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:13:55.906489Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:13:55.906549Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1021:2963] TestWaitNotification: OK eventTxId 103 >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:36.798237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704971523458086:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:36.798317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:36.901309Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704974650489729:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:36.901361Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:36.913569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:36.913694Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:36.973309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:37.010506Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.312404Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:37.324215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:37.366432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:37.366579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:37.370579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:37.370726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:37.403338Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:37.404349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:37.405560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:37.584346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:37.590102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:37.670451Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.011028s 2026-01-10T12:13:37.884069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:37.948250Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:37.959387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.092892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003649/r3tmp/yandexHp7E83.tmp 2026-01-10T12:13:38.092925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003649/r3tmp/yandexHp7E83.tmp 2026-01-10T12:13:38.093119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003649/r3tmp/yandexHp7E83.tmp 2026-01-10T12:13:38.093219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:38.212617Z INFO: TTestServer started on Port 13583 GrpcPort 23938 PQClient connected to localhost:23938 === TenantModeEnabled() = 1 === Init PQ - start server on port 23938 2026-01-10T12:13:38.991895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:38.992140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:38.992396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:38.992419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:38.992637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:38.992695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:38.995406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:38.995584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:38.995786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:38.995826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:38.995843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:38.995856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-10T12:13:38.998675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:38.998727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:38.998750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-10T12:13:39.001255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.001295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.001327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.001363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-10T12:13:39.006130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:39.008763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-10T12:13:39.008878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:39.011110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.011147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-10T12:13:39.011247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.012332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047219057, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.012504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047219057 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:39.012541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.012835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-10T12:13:39.012881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.013048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish p ... 
HEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976710666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54994" , at schemeshard: 72057594046644480 2026-01-10T12:13:53.042551Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710666:0, at schemeshard: 72057594046644480 2026-01-10T12:13:53.042633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-10T12:13:53.042640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-10T12:13:53.042744Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:53.042767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:53.042827Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-01-10T12:13:53.042834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-10T12:13:53.042849Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-01-10T12:13:53.042856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-10T12:13:53.042885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-10T12:13:53.042927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710666, ready parts: 1/1, is published: false 2026-01-10T12:13:53.042944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-10T12:13:53.042956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-10T12:13:53.042968Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710666:0 2026-01-10T12:13:53.042980Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710666, publications: 1, subscribers: 0 2026-01-10T12:13:53.042991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710666, [OwnerId: 72057594046644480, LocalPathId: 46], 4 2026-01-10T12:13:53.045384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710666, response: Status: StatusSuccess TxId: 281474976710666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:53.045682Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2026-01-10T12:13:53.045841Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:13:53.045854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710666, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-10T12:13:53.045998Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:13:53.046018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7593705020221948865:2393], at schemeshard: 72057594046644480, txId: 281474976710666, path id: 46 2026-01-10T12:13:53.047169Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-01-10T12:13:53.047258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-01-10T12:13:53.047269Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710666 2026-01-10T12:13:53.047285Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710666, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 4 2026-01-10T12:13:53.047301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-10T12:13:53.047389Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710666, subscribers: 0 ===Wait for session created with token with removed ACE to die2026-01-10T12:13:53.050824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 2026-01-10T12:13:53.553194Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705045991753944:2385], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:53.556496Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZTM3MjhhMmYtZGEyZjU2MDQtYzZmZjA4NWYtOTc0YzU4N2I=, ActorId: [3:7593705045991753937:2381], ActorState: ExecuteState, LegacyTraceId: 01kekx7jfc731hc5a770fpj8z2, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:53.556914Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:13:54.023282Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-01-10T12:13:54.024186Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: test-group-id|dd998353-f8f5a470-7a8daf25-f668b66a_0 describe result for acl check 2026-01-10T12:13:54.024329Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|dd998353-f8f5a470-7a8daf25-f668b66a_0 2026-01-10T12:13:54.024742Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|dd998353-f8f5a470-7a8daf25-f668b66a_0 is DEAD 2026-01-10T12:13:54.025073Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2026-01-10T12:13:54.593238Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705050286721265:2393], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:54.595869Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MTNhOWZjZjgtOGVhZDBkYzYtNWVkNWZjNTgtNTZlZDQwOA==, ActorId: [3:7593705050286721263:2392], ActorState: ExecuteState, LegacyTraceId: 01kekx7kg0ax4kabr9njs0kqvb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:54.597654Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> YdbIndexTable::OnlineBuild >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> YdbIndexTable::MultiShardTableOneIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11892, MsgBus: 4668 2026-01-10T12:07:00.472521Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703270215450966:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:00.472578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:07:01.117746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:07:01.154186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:07:01.154296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:07:01.158829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:07:01.167202Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593703270215450722:2081] 1768046820429439 != 1768046820429442 2026-01-10T12:07:01.212665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:07:01.513193Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:07:01.607625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:07:01.607858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:07:01.607877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:07:01.607885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:07:01.607963Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:07:02.496390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:07:02.524422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:07:05.472720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593703270215450966:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:07:05.472783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:07:06.223405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-10T12:07:06.575319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-10T12:07:06.713533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-10T12:07:06.909961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-10T12:07:07.163494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-10T12:07:07.415070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"abcd ","abcd "} 2026-01-10T12:07:07.666289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-10T12:07:07.807069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-10T12:07:07.960303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-10T12:07:08.159106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-10T12:07:08.315093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-10T12:07:08.504000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-10T12:07:08.644591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-10T12:07:08.768563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-10T12:07:08.895531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 1111 2026-01-10T12:07:09.084017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {1111,1111} 2026-01-10T12:07:09.286157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
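For reference, a minimal sketch (plain Python, not YDB test code; the function names are illustrative) of the coercion rules implied by the Coerce_pgbpchar / Coerce_pgvarchar / Coerce_pgbit errors above, assuming standard PostgreSQL semantics for character(n), character varying(n) and bit(n):

# Sketch only, under the assumption of standard PostgreSQL type semantics:
#  - character(n): values longer than n are rejected, shorter values are space-padded
#    (as in the {"abcd ","abcd "} output above)
#  - character varying(n): values longer than n are rejected, shorter values kept as-is
#  - bit(n): the bit-string length must match n exactly

def coerce_bpchar(value: str, n: int) -> str:
    if len(value) > n:
        raise ValueError(f"value too long for type character({n})")
    return value.ljust(n)   # pad with spaces up to the declared width

def coerce_varchar(value: str, n: int) -> str:
    if len(value) > n:
        raise ValueError(f"value too long for type character varying({n})")
    return value            # no padding for varchar

def coerce_bit(value: str, n: int) -> str:
    if len(value) != n:
        raise ValueError(f"bit string length {len(value)} does not match type bit({n})")
    return value

if __name__ == "__main__":
    print(repr(coerce_bpchar("abcd", 4)))   # ok: 'abcd'
    try:
        coerce_bpchar("abcd", 2)            # rejected, like the character(2) error above
    except ValueError as e:
        print(e)
    try:
        coerce_bit("1111", 2)               # rejected, like the bit(2) error above
    except ValueError as e:
        print(e)

This mirrors why the tests that upsert 'abcd' into 2-character columns and '1111' into bit(2) report coercion failures, while the same values pass for wider declared types.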
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2026-01-10T12:07:09.451152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, subopera ... 13:44.917164Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.056698Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:45.103564Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710837:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.236354Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 650 2026-01-10T12:13:45.244792Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976710839 at tablet 72075186224037947 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710839] at 72075186224037947 while waiting for stream clearance) | 2026-01-10T12:13:45.248383Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710839 at tablet 72075186224037947 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710839] at 72075186224037947 while waiting for stream clearance) | 2026-01-10T12:13:45.295647Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710840:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.457533Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:45.542443Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710842:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 829 2026-01-10T12:13:45.767014Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710843:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.942835Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:46.002513Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 774 2026-01-10T12:13:46.234095Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710846:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:46.471735Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:46.640268Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2950 2026-01-10T12:13:46.646422Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976710849 at tablet 72075186224037953 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710849] at 72075186224037953 while waiting for stream clearance) | 2026-01-10T12:13:46.648455Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710849 at tablet 72075186224037953 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710849] at 72075186224037953 while waiting for stream clearance) | 2026-01-10T12:13:46.700386Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710850:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:46.920206Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:47.065675Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 114 2026-01-10T12:13:47.233275Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710853:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:47.396982Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710854:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:47.539846Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3802 2026-01-10T12:13:47.594007Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710856:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:47.849029Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:47.906635Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710858:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 4072 2026-01-10T12:13:48.116257Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710859:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:48.280243Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:48.359772Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710861:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:48.518286Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 142 2026-01-10T12:13:48.583245Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710863:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:48.815601Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710864:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:48.941008Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3615 2026-01-10T12:13:48.991204Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710866:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.161837Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.267820Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3614 
2026-01-10T12:13:49.318090Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.463437Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:13:49.504682Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710871:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.683087Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 22 2026-01-10T12:13:49.743868Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710873:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.961238Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710874:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:38.704288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:38.704403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.704448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:38.704488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:38.704550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:38.704583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:38.704662Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:38.704749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:38.705666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:38.706015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:38.796532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:38.796641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.815417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:38.820823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:38.821074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:38.829625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:38.829867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:38.830680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:38.831240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:38.835205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.835453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:38.836758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:38.836826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:38.837022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:38.837080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:38.837130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:38.837404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:38.994938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.996994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.997094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.997183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.997294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.997388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:38.997454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2026-01-10T12:13:56.623290Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 3 -> 128 2026-01-10T12:13:56.633966Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.634255Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.634328Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.634394Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2026-01-10T12:13:56.634475Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2026-01-10T12:13:56.634657Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:56.649168Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-10T12:13:56.649394Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-10T12:13:56.649885Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:56.650068Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 34359740527 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:56.650160Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-10T12:13:56.650626Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:13:56.650720Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-10T12:13:56.650921Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 6 2026-01-10T12:13:56.651071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:781:2755], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:13:56.657342Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:56.657436Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:13:56.657744Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:56.657816Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:213:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:13:56.658381Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.658470Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2026-01-10T12:13:56.658525Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-10T12:13:56.659865Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:56.660010Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:13:56.660063Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:13:56.660272Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:13:56.660337Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-10T12:13:56.660503Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:13:56.665719Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.665803Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:13:56.665996Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:56.666052Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:56.666110Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:13:56.666154Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:56.666208Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:13:56.666310Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:723:2713] message: TxId: 102 2026-01-10T12:13:56.666378Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:13:56.666436Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:13:56.666482Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:13:56.666810Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:13:56.667482Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:56.671619Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:56.671711Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:970:2912] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-10T12:13:56.675656Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:56.675916Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2026-01-10T12:13:56.675981Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2026-01-10T12:13:56.676176Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, explain: Invalid 
AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-10T12:13:56.676252Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-10T12:13:56.679667Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:56.680050Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:55.272898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:55.272998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.273052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:55.273104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:55.273144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:55.273170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:55.273251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.273342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:55.274231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:55.274539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:55.424067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:55.428244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:55.468941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:55.469062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:55.469277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:55.485871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:55.486238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:55.487026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.487313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:55.490035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.490224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:55.491483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:55.491544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.491611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:55.491658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:55.491698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:55.492000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.499177Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:55.642808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:55.643088Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.643306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:55.643367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:55.643643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:55.643719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:55.646412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.646637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:55.646899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.646965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:55.647022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:55.647058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:55.649681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.649764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:55.649808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:55.651804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.651855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.651900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:55.651954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:55.655843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:55.663292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:55.663535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:55.664974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.665124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:55.665180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:55.665501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:55.665578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:55.665755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:55.665835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:55.673672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:55.673751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:57.125452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:57.125480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:13:57.125982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:57.126999Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-10T12:13:57.128948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:57.129234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:13:57.129525Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:13:57.130065Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:13:57.130170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:13:57.130371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2026-01-10T12:13:57.131327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:13:57.131481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:13:57.131895Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:13:57.133483Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:13:57.133680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-10T12:13:57.135021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:57.135065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:13:57.135129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:13:57.135648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:57.135701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:57.135831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:13:57.136114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:57.136729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:13:57.140633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:13:57.140720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:13:57.148435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:13:57.148503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:13:57.148635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:13:57.148666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:13:57.148746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:13:57.148789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:13:57.149012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:57.149129Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:13:57.149199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:13:57.149243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:13:57.149334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:57.151575Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:13:57.151863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:57.151907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:57.152429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:57.152533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:57.152571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:540:2494] TestWaitNotification: OK eventTxId 102 2026-01-10T12:13:57.169890Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:57.170139Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 283us result status StatusPathDoesNotExist 2026-01-10T12:13:57.170329Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-10T12:13:57.171039Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:57.171208Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 179us result status StatusPathDoesNotExist 2026-01-10T12:13:57.171337Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:37.522726Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704975993119646:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:37.524624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:37.796496Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.922815Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:38.351641Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.351836Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:38.417227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.456698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.456816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.463328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:13:38.463759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.477528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.482966Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:38.499680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.584140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.643983Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.664376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.836660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.856524Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.927050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003643/r3tmp/yandexSC16fH.tmp 2026-01-10T12:13:38.927085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003643/r3tmp/yandexSC16fH.tmp 2026-01-10T12:13:38.927271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003643/r3tmp/yandexSC16fH.tmp 2026-01-10T12:13:38.927410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:39.007246Z INFO: TTestServer started on Port 10498 GrpcPort 22242 PQClient connected to localhost:22242 === TenantModeEnabled() = 1 === Init PQ - start server on port 22242 2026-01-10T12:13:39.620132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:39.620360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.624962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:39.624990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:39.625272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:39.625418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:39.629928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.630098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:39.630286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.630325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:39.630349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:39.630361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:13:39.636917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.636971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:39.636994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:13:39.639200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.639235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:39.639258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.639294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:13:39.644277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:39.644745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation 
in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.644761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:13:39.644779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:39.647150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:13:39.647275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:39.650414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047219694, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:39.650580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047219694 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:39.650607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.650860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:13:39.650897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:39.651050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:13:39.651091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:13:39.653780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:13:39.653807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:13:39.653995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard ... 
: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.710575Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.710586Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.748266Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.748301Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.748314Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.748333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.748348Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.768186Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.768218Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.768233Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.768251Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.768262Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.812337Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.812370Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.812382Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.812399Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.812410Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.850400Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.850422Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.850429Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.850440Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.850446Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.872163Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.872190Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.872198Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.872209Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.872217Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.916196Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.916227Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.916240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.916258Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.916266Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.952215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.952249Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.952262Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.952282Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.952293Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:13:54.972121Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:54.972159Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.972173Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:54.972192Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:54.972205Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.016206Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.016269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.016285Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.016305Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.016316Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.056251Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.056286Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.056299Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.056318Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.056333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.072747Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.072782Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.072794Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.072812Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.072824Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.116193Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.116225Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.116240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.116258Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.116269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.159788Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.159818Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.159829Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.159847Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.159857Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.175079Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.175109Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.175120Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.175138Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.175149Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:13:55.222163Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:13:55.222198Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.222211Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:13:55.222231Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:13:55.222243Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
>> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD]
>> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink
|88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest
|88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest
>> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD]
>> GenericFederatedQuery::IcebergHadoopTokenSelectConstant
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive
>> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD]
>> GenericFederatedQuery::IcebergHadoopSaSelectConstant
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
>> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD]
>> TSchemeShardSubDomainTest::CreateWithNoEqualName
>> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD]
>> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:56.765471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:56.765572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:56.765614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:56.765658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:56.765720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:56.765753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:56.765807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:56.765898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:56.766948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:56.767256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:56.879638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:56.879718Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:56.917922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:56.918036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:56.918234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:56.927735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:56.928041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:56.928867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:56.929123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:56.931760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:56.931963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:56.933323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:56.933393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:56.933463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:56.933513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:56.933559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:56.933869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:57.100965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.105984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.106063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.106193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.106282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.106369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:58.130059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:13:58.130342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-10T12:13:58.130414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:13:58.130580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:13:58.130641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:13:58.130700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:13:58.133388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:58.133448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:58.133663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:13:58.133817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:58.133857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:13:58.133898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:13:58.134136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:13:58.134180Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:13:58.134289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:13:58.134324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:13:58.134379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:13:58.134424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:13:58.134460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:13:58.134502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:13:58.134539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:13:58.134575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:13:58.134717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:13:58.134768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:13:58.134808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:13:58.134841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:13:58.135624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.135717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.135752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:13:58.135805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:13:58.135899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:13:58.136911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.137011Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.137043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:13:58.137078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:13:58.137109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:13:58.137212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:13:58.141029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:13:58.142044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-10T12:13:58.146120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:58.146358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:13:58.146560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2026-01-10T12:13:58.149266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-10T12:13:58.149509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-10T12:13:58.149866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:13:58.149965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:13:58.150073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:58.150096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:58.150611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:13:58.150753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:13:58.150794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:726:2716] 2026-01-10T12:13:58.150933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:58.151007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:58.151039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:726:2716] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102
|88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD]
>> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:57.511544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:57.511623Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:57.511659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:57.511698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:57.511753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:57.511781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:57.511863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:57.511973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:57.512805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:57.513093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:57.611492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:57.611564Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:57.626905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:57.627004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:57.627203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:57.636314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:57.636555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:57.637306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:57.637535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:57.639738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:57.639936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:57.641384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:57.641444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:13:57.641578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:57.641628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:57.641670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:57.641966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:57.802252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803675Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.803963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.804046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.804153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.804242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.804312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... plete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:13:58.657850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:13:58.665040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:58.665087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:58.665246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:13:58.665335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:58.665382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:13:58.665443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:13:58.665529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:13:58.665570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:13:58.665689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:13:58.665742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:13:58.665785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:13:58.665817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:13:58.665850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:13:58.665900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2026-01-10T12:13:58.665952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:13:58.665998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:13:58.666061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:13:58.666098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:13:58.666129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:13:58.666163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:13:58.667158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.667249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.667281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:13:58.667322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:13:58.667361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:13:58.668344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.668431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:13:58.668465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:13:58.668492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:13:58.668534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:13:58.668620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:13:58.670950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:13:58.672277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-10T12:13:58.675810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:58.676197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2026-01-10T12:13:58.676264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2026-01-10T12:13:58.676456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-10T12:13:58.676508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-10T12:13:58.678732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:58.678959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-10T12:13:58.679289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:13:58.679330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:13:58.679409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:13:58.679430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:13:58.679853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:13:58.680018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:13:58.680058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:720:2710] 2026-01-10T12:13:58.680255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:13:58.680323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:13:58.680346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:720:2710] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::ForceDropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:55.113600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:55.113687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.113739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:55.113792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:55.113833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:55.113883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:55.113943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.114075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:55.114917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:55.115244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:55.283305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:55.283368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:55.301078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:55.301189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:55.301422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:55.310602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:55.310890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:55.311719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.311991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:55.314867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.315062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:55.316492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:55.316564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.316641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:55.316693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:55.316773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:55.317026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:55.517956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.519995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.520068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.520408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.520539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.520643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:55.520763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:59.221620Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 232us result status StatusSuccess 2026-01-10T12:13:59.222075Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 
10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:59.222608Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:59.222803Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 211us result status StatusSuccess 2026-01-10T12:13:59.223158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:59.223670Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:59.223861Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 174us result status StatusSuccess 2026-01-10T12:13:59.224264Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:59.224748Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:13:59.224946Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 200us result status StatusSuccess 2026-01-10T12:13:59.225344Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } 
Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:42.830823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705000498210898:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:42.830967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:42.955013Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704996648201580:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:42.955100Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:42.963498Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:42.998808Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:43.225887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:43.222131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:43.402997Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:43.555637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:43.555782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:43.560196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:43.560261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:43.651466Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:43.651669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:43.655631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:43.687968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:43.709973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:43.740549Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:43.872632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:43.928822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003641/r3tmp/yandexfgBrQX.tmp 2026-01-10T12:13:43.928846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/d9xi/003641/r3tmp/yandexfgBrQX.tmp 2026-01-10T12:13:43.929010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003641/r3tmp/yandexfgBrQX.tmp 2026-01-10T12:13:43.929101Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:43.964675Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:44.068431Z INFO: TTestServer started on Port 7619 GrpcPort 8404 2026-01-10T12:13:44.152200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:8404 === TenantModeEnabled() = 1 === Init PQ - start server on port 8404 2026-01-10T12:13:44.513798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:44.513985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:44.514181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:44.514201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:44.514388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:44.514440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:44.517337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:44.517542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:44.517715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:44.517751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:44.517780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:44.517793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 2026-01-10T12:13:44.520395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:44.520443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:44.520461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2026-01-10T12:13:44.522144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-10T12:13:44.522167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2026-01-10T12:13:44.522185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-10T12:13:44.523075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:44.523113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:44.523170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:44.523205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2026-01-10T12:13:44.528401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:44.531128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2026-01-10T12:13:44.531242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:44.534115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047224580, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:44.534272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047224580 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 
72057594046644480 2026-01-10T12:13:44.534316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:44.534603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 128 -> 240 2026-01-10T12:13:44.534649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:44.534811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:13:44.534859Z ... eshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:13:57.327564Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-10T12:13:57.327579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:57.327599Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-10T12:13:57.327614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:57.327652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-10T12:13:57.327694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2026-01-10T12:13:57.327711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-10T12:13:57.327721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-10T12:13:57.327746Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-10T12:13:57.327758Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2026-01-10T12:13:57.327768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-10T12:13:57.331071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:57.331545Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove 
access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2026-01-10T12:13:57.331746Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:13:57.331767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-10T12:13:57.332026Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:13:57.332042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7593705037457009212:2399], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 46 2026-01-10T12:13:57.332763Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:57.332844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:57.332857Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2026-01-10T12:13:57.332878Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-10T12:13:57.332899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-10T12:13:57.332998Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2026-01-10T12:13:57.337657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2026-01-10T12:13:57.337839Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-10T12:13:57.337852Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-10T12:13:57.338389Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2026-01-10T12:13:57.338480Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:39126 2026-01-10T12:13:57.338495Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1551: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:39126 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-10T12:13:57.338507Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 
2026-01-10T12:13:57.340441Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-10T12:13:57.340630Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-10T12:13:57.340642Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:13:57.340650Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:13:57.340686Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593705063226814253:2375] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-10T12:13:57.340705Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:13:57.341490Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-10T12:13:57.341630Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-01-10T12:13:57.342002Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 2026-01-10T12:13:57.343527Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-10T12:13:57.343806Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-10T12:13:57.343851Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 2026-01-10T12:13:57.344026Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|ac49bd65-f16544ff-8bc9c04f-c85000a4_0 is DEAD 2026-01-10T12:13:57.344344Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:13:57.827545Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705063226814274:2384], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:57.828565Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=Zjk5NTliODgtZTkyNDkzNmItMzdhYzU5MGUtNDk5MzQyM2E=, ActorId: [3:7593705063226814267:2380], ActorState: ExecuteState, LegacyTraceId: 01kekx7pn4fasy9jepncpecg7e, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:57.829040Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:57.271725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:57.271786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:57.271841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:57.271903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: 
using default configuration 2026-01-10T12:13:57.271937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:57.271963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:57.272013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:57.272098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:57.272852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:57.273149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:57.357323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:57.357385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:57.382080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:57.382181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:57.382375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:57.395867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:57.396151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:57.396903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:57.397139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:57.401414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:57.401608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:57.402877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:57.402956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:57.403037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:57.403084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:57.403122Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:57.403415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:57.673672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682582Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.682895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.683009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.683078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:57.683146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:00.543263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.543408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:14:00.546077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 106, at schemeshard: 72057594046678944 2026-01-10T12:14:00.546299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2026-01-10T12:14:00.546925Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:00.547107Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 203us result status StatusSuccess 2026-01-10T12:14:00.547487Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:00.548117Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:00.548299Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 216us result status StatusSuccess 2026-01-10T12:14:00.548669Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 
200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:00.549400Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:00.549537Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 155us result status StatusSuccess 2026-01-10T12:14:00.549807Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:00.550418Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:00.550561Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 163us result status StatusSuccess 2026-01-10T12:14:00.550887Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 
CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 41 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 41 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:59.826499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:59.826603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:59.826646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:59.826694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:59.826739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:59.826774Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:59.826831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:59.826962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:59.827969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:59.828334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:59.939093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:59.939169Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:59.967457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:59.973437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:59.973715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:59.988921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:59.989190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:59.990063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:59.990558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:59.995560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:59.995793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:59.997436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:59.997534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:59.997688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:59.997748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:59.997802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:59.998078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2026-01-10T12:14:00.179683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.181990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.182060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.182164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.182247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.182329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-10T12:14:01.045264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-10T12:14:01.045323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-10T12:14:01.045504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:01.045572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:01.045638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-01-10T12:14:01.048068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.048149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.048376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:01.048534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.048596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:01.048661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-10T12:14:01.048980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.049041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:01.049160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:01.049204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:01.049331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:01.049374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:01.049416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready 
parts: 1/1, is published: false 2026-01-10T12:14:01.049483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:01.049538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:01.049575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:01.049654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:14:01.049694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-10T12:14:01.049744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:14:01.049785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:14:01.050755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:01.050879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:01.050925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:01.050967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:14:01.051008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:01.054775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:01.054910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:01.054968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:01.055010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:14:01.055051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 2 2026-01-10T12:14:01.055158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-10T12:14:01.058791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:01.059789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-10T12:14:01.060131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:01.060181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-10T12:14:01.060283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:01.060328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:14:01.060882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:01.061002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:01.061047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:723:2713] 2026-01-10T12:14:01.061258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:01.061365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:01.061397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:723:2713] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:01.061961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:01.062301Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 274us result status StatusSuccess 2026-01-10T12:14:01.063267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:41.382707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:41.382793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:41.382839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:41.382877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:41.382925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:41.382970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:41.383055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:41.383130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:41.384104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:41.384428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:41.494515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:41.494581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:41.518819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:41.518962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:41.519169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:41.530638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:41.530930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:41.531992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:41.532391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:41.535114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:41.535311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:41.536558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:41.536619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:41.536709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:41.536759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:41.536803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:41.537107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:41.713306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.714548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.714756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.714847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.714945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:41.715823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:00.585888Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:00.585956Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:213:2214], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:14:00.586815Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6262: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2026-01-10T12:14:00.587023Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186234409546 2026-01-10T12:14:00.587353Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2026-01-10T12:14:00.589715Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:00.589927Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:00.590020Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:00.590099Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-10T12:14:00.590188Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:14:00.590385Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:14:00.595680Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 2 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-10T12:14:00.595842Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:00.596040Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:800:2770], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 2, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:00.596242Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-10T12:14:00.596281Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-10T12:14:00.596437Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-10T12:14:00.596472Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:892:2836], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-10T12:14:00.597501Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:00.597587Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:00.597713Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186234409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:14:00.597973Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:14:00.598023Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:14:00.598544Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:14:00.598638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:00.598679Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [8:1041:2983] TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:00.599246Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:00.599501Z node 8 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 268us result status StatusSuccess 2026-01-10T12:14:00.599958Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:00.600773Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2026-01-10T12:14:00.601002Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 261us result status StatusSuccess 2026-01-10T12:14:00.601536Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409546 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976720657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink >> TSchemeShardSubDomainTest::SimultaneousDefine ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:00.974681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:00.974778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:00.974818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:00.974864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:00.974929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-10T12:14:00.974969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:00.975047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:00.975136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:00.976005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:00.976354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.113318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.113410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.131496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.131899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.132136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.142533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.142978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.143727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.144247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.147251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.147461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.148743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.148812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.149044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.149102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.149148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.149309Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.156693Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:01.323236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:01.323534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.323789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:01.323859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:01.324150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:01.324257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:01.332776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.332985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:01.333226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.333294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:01.333348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:01.333382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:01.344990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.345062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:01.345106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 
2026-01-10T12:14:01.359628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.359705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.359759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.359833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:01.367573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:01.374528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:01.374803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:01.376150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.376307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:01.376390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.376714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:01.376776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.376963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:01.377060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:01.381375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.381450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 10T12:14:01.463970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:01.463999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:01.465900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:01.467110Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 2026-01-10T12:14:01.467337Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-10T12:14:01.467430Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-10T12:14:01.467554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-10T12:14:01.467995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-10T12:14:01.468443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:01.468621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:01.468677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:282:2272] 2026-01-10T12:14:01.468796Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 2026-01-10T12:14:01.469042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.469323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:14:01.469621Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-10T12:14:01.469750Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-10T12:14:01.469942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:01.470241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:14:01.470648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:14:01.470818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:01.471813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:01.472001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:01.474103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:01.474302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:01.474710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:01.474781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:01.474947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:01.475478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:01.475549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:01.475628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:01.476323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:14:01.479026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:1 2026-01-10T12:14:01.479237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:01.479336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:14:01.482558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:01.482724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:01.482911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:01.482978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:01.483684Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:01.483942Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 220us result status StatusPathDoesNotExist 2026-01-10T12:14:01.484181Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:01.484658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:01.485004Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 354us result status StatusSuccess 2026-01-10T12:14:01.485496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty 
Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.194943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.195053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.195094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.195138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.195203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.195254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.195330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.195428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.196330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.196669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.290747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.290820Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.308919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.309047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.309252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.321500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.321775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.322544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.322790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.325305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.325501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.326778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.326837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.326918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.326969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.327012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.327282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.334439Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:01.492450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:01.492720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.492922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:01.492972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:01.493205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:01.493277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:01.505033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.505258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:01.505501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.505573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:01.505634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:01.505673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:01.510709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.510785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:01.510832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:01.513084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.513167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.513222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2026-01-10T12:14:01.513289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:01.517731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:01.523867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:01.524170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:01.525383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.525536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:01.525622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.525970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:01.526028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.526238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:01.526320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:01.529163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.529205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 2026-01-10T12:14:01.794358Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:01.794535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:01.794730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2026-01-10T12:14:01.796115Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:14:01.796947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:14:01.797145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2026-01-10T12:14:01.798268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:01.798433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:01.799127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:01.799271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-10T12:14:01.801030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:01.801089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:01.801245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:01.802453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:5 2026-01-10T12:14:01.802505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:14:01.802652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:01.802702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:01.802776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:01.803619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:01.803655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:01.803802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:01.803831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:01.806798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:14:01.806837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:14:01.807029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:01.807057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:01.807380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:01.807422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:01.807756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:01.808672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2026-01-10T12:14:01.808989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:14:01.809049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:14:01.809146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:14:01.809180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:14:01.809525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:14:01.809637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:01.809684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:662:2568] 2026-01-10T12:14:01.809901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:14:01.809945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:01.809974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:662:2568] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:01.810426Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:01.810676Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 234us result status StatusPathDoesNotExist 2026-01-10T12:14:01.810847Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:01.811248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:01.811396Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 158us result status StatusSuccess 2026-01-10T12:14:01.811824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:59.804228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:59.804324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:59.804379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:59.804449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:59.804511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:59.804546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:59.804603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:59.804689Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:59.805612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:59.805962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:59.909314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:59.909383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:59.929851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:59.929979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:59.930181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:59.943968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:59.944271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:59.945076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:59.945337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:59.951030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:59.951246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:59.952654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:59.952743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:59.952834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:59.952885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:59.952929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:59.953248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:59.965753Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:00.155806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:00.156145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.156395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:00.156455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:00.156719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:00.156814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:00.163011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:00.163277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:00.163538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.163609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:00.163661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:00.163704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:00.166643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.166724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:00.166782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:00.169298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.169370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.169418Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:00.169531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:00.179894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:00.182345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:00.182588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:00.183800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:00.183966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:00.184027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:00.184397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:00.184482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:00.184681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:00.184778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:00.187748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:00.187801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.132729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:02.132791Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2026-01-10T12:14:02.132991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 129 2026-01-10T12:14:02.133242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:02.133316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2026-01-10T12:14:02.136812Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.136892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:02.137143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2026-01-10T12:14:02.137344Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.137397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:341:2316], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-10T12:14:02.137456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:341:2316], at schemeshard: 72057594046678944, txId: 108, path id: 5 2026-01-10T12:14:02.137963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.138029Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:14:02.138102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2026-01-10T12:14:02.139102Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.139239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.139293Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:14:02.139342Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:14:02.139394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:02.144599Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.144706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.144767Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:14:02.144816Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2026-01-10T12:14:02.144862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2026-01-10T12:14:02.144968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-10T12:14:02.148145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2026-01-10T12:14:02.149612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-10T12:14:02.149745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2026-01-10T12:14:02.166370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-10T12:14:02.166444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2026-01-10T12:14:02.166621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-10T12:14:02.166691Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-10T12:14:02.169352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.169578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.169665Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-10T12:14:02.169812Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:14:02.169857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.169904Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:14:02.169948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.169985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-10T12:14:02.170059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:500:2449] message: TxId: 108 2026-01-10T12:14:02.170118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.170161Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-10T12:14:02.170201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-10T12:14:02.170362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-10T12:14:02.172926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:14:02.172991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:949:2859] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-10T12:14:02.182386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:02.182675Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.183120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:02.186334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:02.186630Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-10T12:14:02.187163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-10T12:14:02.187223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-10T12:14:02.187864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-10T12:14:02.188005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-10T12:14:02.188049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:982:2892] TestWaitNotification: OK eventTxId 109 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> TSchemeShardSubDomainTest::Redefine ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:45.286132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705013085136632:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:45.286212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:45.338231Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705011335270202:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:45.338275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:45.399208Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:45.400373Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:45.628927Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:45.646440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:13:45.808456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:45.936935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:45.937043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:45.942328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:45.942395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:46.042889Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:46.061225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:46.088057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:46.281656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:46.292238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:46.292800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:46.294536Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:46.400353Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:46.671359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00363d/r3tmp/yandexNv7jK1.tmp 2026-01-10T12:13:46.671388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00363d/r3tmp/yandexNv7jK1.tmp 2026-01-10T12:13:46.671597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00363d/r3tmp/yandexNv7jK1.tmp 2026-01-10T12:13:46.671720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:46.744841Z INFO: TTestServer started on Port 8429 GrpcPort 24881 PQClient connected to localhost:24881 === TenantModeEnabled() = 1 === Init PQ - start server on port 24881 2026-01-10T12:13:47.470697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:47.472490Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:47.472764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:47.472783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:47.473006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:47.473092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:47.477122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:47.477304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:47.477512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:47.477550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:47.477567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:47.477579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:13:47.480315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:47.480375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:47.480391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:13:47.484759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:47.484787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:13:47.484804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:47.485083Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:47.485114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:47.485131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:47.485178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:13:47.489435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:47.491785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:13:47.491889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:47.497110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047227541, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:47.497324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047227541 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:47.497352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:47.497653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:13:47.497703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:13:47.497885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:13:47.497931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:13:47.500052Z node 1 :F ... 
only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2026-01-10T12:14:00.313826Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 auth for : consumer_aba 2026-01-10T12:14:00.314484Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 Handle describe topics response 2026-01-10T12:14:00.314592Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 auth is DEAD 2026-01-10T12:14:00.314675Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 auth ok: topics# 1, initDone# 0 2026-01-10T12:14:00.315829Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 register session: topic# /Root/account1/write_topic 2026-01-10T12:14:00.319565Z :INFO: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] [null] Server session id: consumer_aba_3_2_8942886203531002177_v1 2026-01-10T12:14:00.319886Z :DEBUG: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:00.319837Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][write_topic] pipe [3:7593705075943921791:2386] connected; active server actors: 1 2026-01-10T12:14:00.320498Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 grpc read done: success# 1, data# { read { } } 2026-01-10T12:14:00.320637Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 got read request: guid# 16f146b3-927e179f-f12d0d39-1e21113a 2026-01-10T12:14:00.320488Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7593705075943921791:2386] session consumer_aba_3_2_8942886203531002177_v1 2026-01-10T12:14:00.320560Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2026-01-10T12:14:00.320635Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2026-01-10T12:14:00.320687Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_8942886203531002177_v1" (Sender=[3:7593705075943921788:2386], Pipe=[3:7593705075943921791:2386], Partitions=[], ActiveFamilyCount=0) 2026-01-10T12:14:00.320730Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2026-01-10T12:14:00.320799Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037894][write_topic] consumer consumer_aba balancing. 
Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-10T12:14:00.320875Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_8942886203531002177_v1" (Sender=[3:7593705075943921788:2386], Pipe=[3:7593705075943921791:2386], Partitions=[], ActiveFamilyCount=0) 2026-01-10T12:14:00.320987Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_8942886203531002177_v1" sender [3:7593705075943921788:2386] lock partition 0 for ReadingSession "consumer_aba_3_2_8942886203531002177_v1" (Sender=[3:7593705075943921788:2386], Pipe=[3:7593705075943921791:2386], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-10T12:14:00.321063Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-10T12:14:00.321102Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000270s 2026-01-10T12:14:00.323069Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_8942886203531002177_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7593705075943921791 RawX2: 4503612512274770 } Path: "/Root/account1/write_topic" } 2026-01-10T12:14:00.323196Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2026-01-10T12:14:00.323712Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7593705075943921800:2393] 2026-01-10T12:14:00.324779Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_8942886203531002177_v1:1 with generation 1 2026-01-10T12:14:00.339786Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1768047240173 CreateTimestampMS: 1768047240170 SizeLag: 165 WriteTimestampEstimateMS: 1768047240173 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-10T12:14:00.339901Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2026-01-10T12:14:00.339986Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session 
cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2026-01-10T12:14:00.345543Z :INFO: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] Closing read session. Close timeout: 0.000000s 2026-01-10T12:14:00.345609Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2026-01-10T12:14:00.345658Z :INFO: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] Counters: { Errors: 0 CurrentSessionLifetimeMs: 39 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:14:00.345777Z :NOTICE: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:14:00.345823Z :DEBUG: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] [null] Abort session to cluster 2026-01-10T12:14:00.346452Z :NOTICE: [/Root] [/Root] [eec47d9a-60a4aa4e-12776f92-2db4b817] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:14:00.351921Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 grpc read done: success# 0, data# { } 2026-01-10T12:14:00.351963Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 grpc read failed 2026-01-10T12:14:00.351995Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 grpc closed 2026-01-10T12:14:00.352026Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_8942886203531002177_v1 is DEAD 2026-01-10T12:14:00.358636Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_8942886203531002177_v1 2026-01-10T12:14:00.358330Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][write_topic] pipe [3:7593705075943921791:2386] disconnected. 2026-01-10T12:14:00.358382Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][write_topic] pipe [3:7593705075943921791:2386] disconnected; active server actors: 1 2026-01-10T12:14:00.358409Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][write_topic] pipe [3:7593705075943921791:2386] client consumer_aba disconnected session consumer_aba_3_2_8942886203531002177_v1 2026-01-10T12:14:00.361126Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7593705075943921806:2394], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:14:00.361732Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NzliZjYwOTMtY2JjYzU1OTYtNTkyNzAzZjYtMzkwNjE3NDY=, ActorId: [3:7593705075943921792:2389], ActorState: ExecuteState, LegacyTraceId: 01kekx7s41egtka6qtep39bhyq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:14:00.362195Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.881571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.881665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.881730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.881780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.881856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.881895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.881976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.882068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.882994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.883359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.999710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.999786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:02.043167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:02.043293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:02.043501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:02.080691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:02.080961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:02.081778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.082021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:02.089825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.090044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:02.091490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.091557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.091631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:02.091677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:02.091720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:02.091989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.114210Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:02.288682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:02.288937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.289170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:02.289242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:02.289474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:02.289583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:02.300200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.300467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:02.300692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.300750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:02.300804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:02.300845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:02.310593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.310675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:02.310735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:02.317596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.317701Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.317763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.317844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:02.323327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:02.338586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:02.338868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:02.340518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.340694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:02.340763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.341125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:02.341192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.341384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:02.341473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:02.349356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.349431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ted: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2026-01-10T12:14:03.004025Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:14:03.004598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:03.004892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2026-01-10T12:14:03.006603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:14:03.006920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409549 2026-01-10T12:14:03.008625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:03.008849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:03.009594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:03.009831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:03.012874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:03.013150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:03.013961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:03.014282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:03.014366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:03.014529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:03.015445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:14:03.015509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:14:03.015767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-10T12:14:03.015804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-10T12:14:03.019591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:03.019739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:03.019798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:03.019886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:03.020330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:03.020381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:03.020570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:03.020602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:03.021235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:14:03.021281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:14:03.021679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:03.021724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:03.023698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:03.023774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:03.024059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:03.026017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:14:03.026270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:14:03.026317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:14:03.026820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:14:03.026952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:03.026992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:791:2681] TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:03.027536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:03.027777Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 257us result status StatusPathDoesNotExist 2026-01-10T12:14:03.027979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:03.028547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:03.028790Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 268us result status StatusSuccess 2026-01-10T12:14:03.029282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.776842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.776968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.777012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.777061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.777111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.777137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.777190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.777287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.778111Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.778427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.881956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.882020Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.903365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.910057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.910285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.917983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.918211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.918917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.919347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.923111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.923306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.924689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.924754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.924885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.924940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.924987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.925239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.932614Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:02.112403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-10T12:14:02.112668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.112885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:02.112937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:02.113170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:02.113242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:02.115676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.115888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:02.116155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.116219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:02.116267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:02.116302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:02.118733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.118800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:02.118844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:02.120719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.120777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.120826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.120882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-10T12:14:02.125046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:02.126933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:02.127108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:02.128308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.128452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:02.128507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.128804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:02.128858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:02.129030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:02.129118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:02.131154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.131210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
3.158688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:14:03.158936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:03.158982Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-10T12:14:03.159098Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:03.159142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:03.159183Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:03.159217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:03.159254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:14:03.159292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:03.159331Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:14:03.159364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:14:03.159448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:03.159489Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:14:03.159524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:14:03.159562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-10T12:14:03.160340Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:03.160424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:03.160459Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:03.160504Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 
2026-01-10T12:14:03.160550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:03.161250Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:03.161332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:03.161365Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:03.161395Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-10T12:14:03.161428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:03.161513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:14:03.161804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:03.161878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:03.161991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:03.162701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:03.162753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:03.162821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:03.167195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:03.169146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:03.169256Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:03.169343Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:14:03.169595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:03.169637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:14:03.170032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:03.170152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:03.170195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:347:2337] TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:03.170651Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:03.170954Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 326us result status StatusPathDoesNotExist 2026-01-10T12:14:03.171112Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:03.171592Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:03.171760Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 192us result status StatusSuccess 2026-01-10T12:14:03.172209Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.689698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.689813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.689859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.689903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.689981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.690015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.690078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.690184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.691041Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.691972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.798475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.798541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.819718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.819817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.820033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.828028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.828322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.829118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.829366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.831744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.831948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.833189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.833249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.833329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.833372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.833410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.833700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.005314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.006935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:02.007510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t state 2026-01-10T12:14:02.937488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 2 -> 3 2026-01-10T12:14:02.939482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.939548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:02.939588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 3 -> 128 2026-01-10T12:14:02.941585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.941634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.941684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2026-01-10T12:14:02.941739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.941874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:02.943596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2026-01-10T12:14:02.943738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000043 2026-01-10T12:14:02.944114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.944239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:02.944296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-10T12:14:02.944570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-10T12:14:02.944624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-10T12:14:02.944803Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:02.944877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2026-01-10T12:14:02.949188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.949247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:02.949487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.949540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 108, path id: 38 2026-01-10T12:14:02.949919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-10T12:14:02.949970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-10T12:14:02.950099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:14:02.950138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.950190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-10T12:14:02.950229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.950265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-10T12:14:02.950317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-10T12:14:02.950361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-10T12:14:02.950405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-10T12:14:02.950495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:14:02.950534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2026-01-10T12:14:02.950569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 38], 8 2026-01-10T12:14:02.951085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.951180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-10T12:14:02.951219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-10T12:14:02.951264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-10T12:14:02.951318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:02.951447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-10T12:14:02.954685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-10T12:14:02.955189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-10T12:14:02.955238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-10T12:14:02.955857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-10T12:14:02.955966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:14:02.956001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:1005:2961] TestWaitNotification: OK eventTxId 108 2026-01-10T12:14:02.956732Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:02.956992Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 285us result status StatusSuccess 2026-01-10T12:14:02.957480Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-10T12:13:43.544828Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705003288336498:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:43.545896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:43.859670Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:43.793673Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705001327034217:2219];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:43.793735Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:43.840939Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:44.184176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.228197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.406114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:44.406222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:13:44.416495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:44.416588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:44.605531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:44.615907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:44.664303Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:44.669470Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:44.676983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:44.680230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:44.728272Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:44.806009Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:45.158579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00363e/r3tmp/yandexQgxy22.tmp 2026-01-10T12:13:45.158605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00363e/r3tmp/yandexQgxy22.tmp 2026-01-10T12:13:45.158778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00363e/r3tmp/yandexQgxy22.tmp 2026-01-10T12:13:45.158868Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:45.300151Z INFO: TTestServer started on Port 18004 GrpcPort 16807 PQClient connected to localhost:16807 === TenantModeEnabled() = 1 === Init PQ - start server on port 16807 2026-01-10T12:13:46.253132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:13:46.253319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:46.253535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:13:46.253555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: 
[OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:13:46.253712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:46.253761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:46.257238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:46.257388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:46.257554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:46.257600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:13:46.257613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2026-01-10T12:13:46.257638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 2026-01-10T12:13:46.260468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:46.260510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:13:46.260532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2026-01-10T12:13:46.264509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-10T12:13:46.264531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2026-01-10T12:13:46.264571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2026-01-10T12:13:46.264805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:46.264828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:46.264845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, 
operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:46.264868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2026-01-10T12:13:46.276234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:46.281217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2026-01-10T12:13:46.281351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:13:46.284095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047226330, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:46.284232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047226330 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:13:46.284256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:46.284513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 128 -> 240 2026-01-10T12:13:46.284544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2026-01-10T12:13:46.284704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:13:46.284740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:13:46.287393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:13:46.287416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPub ... 
:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.171583Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.171591Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.171605Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.171612Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.188827Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.188866Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.188879Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.188896Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.188906Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.198378Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.198403Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.198410Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.198423Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.198430Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.198477Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.198501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.198516Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.198536Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.198564Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.271773Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.271803Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.271816Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.271833Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.271844Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.289288Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.289317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.289329Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.289344Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.289355Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.298945Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.298975Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.298997Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.299016Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.299026Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.299083Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.299094Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.299100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.299111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.299117Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.376590Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.376633Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.376647Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.376667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.376680Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.391308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.391364Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.391381Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.391421Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.391437Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.399262Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.399300Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.399313Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.399343Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.399354Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.399408Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.399419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.399426Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.399440Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.399448Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-10T12:14:01.399625Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705080597751424:2609], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:14:01.400625Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=M2I0NGU4NjctMjhhOTZiNTQtOWYwZjEyY2UtNjBlZWUwZWQ=, ActorId: [1:7593705080597751422:2608], ActorState: ExecuteState, LegacyTraceId: 01kekx7t4c3y2jr11dxnwyq8pt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:14:01.401063Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:14:01.478680Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:01.478718Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.478732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:01.478752Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:01.478778Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TSchemeShardSubDomainTest::Redefine [GOOD] >> 
TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TSchemeShardSubDomainTest::Restart >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:03.301640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:03.301753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:03.301799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:03.301865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:03.301928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:03.301976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:03.302041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:03.302131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:03.303061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:03.303463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:03.434649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:03.434728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:03.453628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:03.453732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-01-10T12:14:03.453923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:03.462150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:03.462384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:03.463150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:03.463346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:03.469325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:03.469527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:03.470874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:03.470940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:03.471028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:03.471086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:03.471130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:03.471405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:03.643966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.645986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.646053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.646153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.646238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.646323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2026-01-10T12:14:04.661053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 3 -> 128 2026-01-10T12:14:04.665239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.669344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.669538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.669697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.669758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.669828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:14:04.669891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-10T12:14:04.670032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:04.672094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-10T12:14:04.672202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000039 2026-01-10T12:14:04.672609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:04.672756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:04.672811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:14:04.673104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 
240 2026-01-10T12:14:04.673173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:14:04.673340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:04.673413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:14:04.675572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:04.675633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:04.675841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:04.675884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:14:04.676330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.676394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:14:04.676493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:04.676533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:04.676576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:04.676616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:04.676683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:14:04.676722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:04.676761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:14:04.676797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:14:04.677017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:14:04.677068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2026-01-10T12:14:04.677100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-10T12:14:04.677801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:04.677923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:04.677975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:04.678023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:14:04.678063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:04.678155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-10T12:14:04.678208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:721:2711] 2026-01-10T12:14:04.681302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:04.681404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:04.681436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:728:2718] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:04.682086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:04.682333Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 242us result status StatusSuccess 2026-01-10T12:14:04.682754Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:04.364532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:04.364634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:04.364680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:04.364724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:04.364779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:04.364831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:04.364892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:04.364979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:04.365833Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:04.366128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:04.476183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:04.476270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:04.494439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:04.494532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:04.494701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:04.520055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:04.520392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:04.521213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:04.521448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:04.524294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:04.527766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:04.529402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:04.529477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:04.529560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:04.529611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:04.529657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:04.529929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.537151Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:04.674164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-10T12:14:04.674383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.674539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:04.674584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:04.674773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:04.674839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:04.677040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:04.677298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:04.677526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.677585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:04.677635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:04.677692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:04.679837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.679904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:04.679945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:04.682846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.682913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.682974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:04.683045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-10T12:14:04.687161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:04.691420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:04.691676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:04.693021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:04.693197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:04.693258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:04.693585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:04.693650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:04.693851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:04.693963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:04.696584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:04.696636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:04.931657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:04.931738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-10T12:14:04.931770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:14:04.931841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:14:04.933671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:14:04.933809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:04.933848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:04.933881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:04.935407Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-10T12:14:04.936652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:04.937057Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:14:04.937340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:04.937597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:04.938338Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:04.938565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:04.938811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2026-01-10T12:14:04.940178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:04.940384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:04.940927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:04.940985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:04.941167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:04.941456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:04.941679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:04.941734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:04.941806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:04.945061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:04.945116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:04.945205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:04.945243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:04.945858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:04.945918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:04.946551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:04.946654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:14:04.947047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:14:04.947096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:14:04.947575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:14:04.947644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:04.947681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:577:2533] TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:04.948266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:04.948471Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusPathDoesNotExist 2026-01-10T12:14:04.948651Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:04.949243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:04.949474Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 215us result status StatusSuccess 2026-01-10T12:14:04.949879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:00.581206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:00.581304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:00.581367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:00.581415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:00.581454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:00.581482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:00.581563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:00.581655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:00.582501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:00.582830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:00.683621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:00.683687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:00.706267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:00.706381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:00.706579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:00.720044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:00.720306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:00.721085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:00.721318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:00.723981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:00.724179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:00.725437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:00.725498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:00.725574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:00.725621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:00.725661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:00.725916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:00.941394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.942959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:00.943522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:14:05.148748Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:14:05.148787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:14:05.148823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-10T12:14:05.149774Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:05.149861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:05.149900Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:05.149945Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:14:05.150012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:05.150710Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:05.150784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:05.150813Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:05.150846Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-10T12:14:05.150878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:05.150950Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:14:05.152943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:14:05.153041Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:05.153080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:05.154265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:05.154692Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:14:05.154899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.155196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:05.155420Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:05.157980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:05.158241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2026-01-10T12:14:05.159853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:05.159933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:05.160107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:05.160573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:05.160630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:05.160701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:05.161452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:05.163246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:05.163308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:05.163534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:05.163598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:05.165541Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:05.165677Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:14:05.166063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:14:05.166123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:14:05.166707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:14:05.166808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:05.166861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:2107:3711] TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:05.177727Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:05.177957Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 271us result status StatusPathDoesNotExist 2026-01-10T12:14:05.178143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
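
The trace above follows a single subdomain drop end to end: TEvDeleteTablet requests to the fake hive, the "Free tablet reply" acknowledgements, DecrementPathDbRefCount bookkeeping, the TTxCleanDroppedSubDomains / TTxCleanDroppedPaths transactions, and finally a describe that comes back StatusPathDoesNotExist. When triaging dumps of this size it can help to pull the entries into structured records first. The sketch below is a hypothetical helper (the names ENTRY_RE, parse_entries and summarize are not part of the YDB repository or the ya tooling); it assumes only the entry layout visible in this log, `<timestamp> node <id> :<COMPONENT> <LEVEL>: <file.cpp:line>: <message>`, and folds untimestamped framework lines (e.g. "Forgetting tablet ...") into the message of the entry they interrupt.

```python
import re
import sys
from collections import Counter

# Hypothetical triage helper for dumps like the one above; not part of ydb/ or ya.
# Assumes only the entry shape seen in this log:
#   <ISO timestamp> node <id> :<COMPONENT> <LEVEL>: <file.cpp:line>: <message>
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) :(?P<component>[A-Z_]+) (?P<level>[A-Z]+): "
    r"(?:(?P<location>\S+\.(?:cpp|h):\d+): )?"      # source location is usually present
    r"(?P<message>.*?)"                              # everything up to the next entry
    r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :|$)",
    re.DOTALL,
)

def parse_entries(raw: str):
    """Yield one dict per log entry (ts, node, component, level, location, message)."""
    for m in ENTRY_RE.finditer(raw):
        yield m.groupdict()

def summarize(raw: str) -> Counter:
    """Count entries per (component, level) to spot unexpected WARN/NOTICE spikes."""
    return Counter((e["component"], e["level"]) for e in parse_entries(raw))

if __name__ == "__main__":
    for (component, level), n in summarize(sys.stdin.read()).most_common():
        print(f"{component:25} {level:7} {n}")
```

Fed this section on stdin, the summary would show the dump is dominated by FLAT_TX_SCHEMESHARD DEBUG entries with a handful of NOTICE/WARN lines, which is usually where the interesting signal sits.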
2026-01-10T12:14:05.179047Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:05.179220Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 180us result status StatusPathDoesNotExist 2026-01-10T12:14:05.179413Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::Restart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.059576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.059666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.059705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.059747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.059802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.059838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.059941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.060027Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.060953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.061263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.165296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.165383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.182102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.182222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.182439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.191490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.191745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.192615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.192859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.195283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.195473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.196782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.196844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.196941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.196989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.197035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.197298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.205138Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:01.367638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:01.367932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.368161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:01.368220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:01.368468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:01.368549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:01.371054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.371282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:01.371511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.371594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:01.371670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:01.371712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:01.381063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.381143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:01.381192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:01.387446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.387531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.387597Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.387666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:01.391728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:01.394148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:01.394372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:01.395576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.395736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:01.395798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.396161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:01.396227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:01.396410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:01.396497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:01.398748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.398801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
0: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:14:05.156376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.156668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:05.156988Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 2026-01-10T12:14:05.157309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2026-01-10T12:14:05.157530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409546 2026-01-10T12:14:05.159271Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2026-01-10T12:14:05.161007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2026-01-10T12:14:05.161231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2026-01-10T12:14:05.161893Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:05.164408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:05.164678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:05.165061Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 
TabletID: 72075186233409557 Forgetting tablet 72075186233409555 Forgetting tablet 72075186233409547 2026-01-10T12:14:05.167493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2026-01-10T12:14:05.167774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409557 2026-01-10T12:14:05.169535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-10T12:14:05.170613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:05.170671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2026-01-10T12:14:05.170770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:05.171166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-10T12:14:05.171475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:05.171537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:05.171665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:05.172551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:05.172622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:05.172739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-10T12:14:05.172764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2026-01-10T12:14:05.175624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:14 2026-01-10T12:14:05.175672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2026-01-10T12:14:05.175738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:05.175761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:05.176008Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:16 2026-01-10T12:14:05.176068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2026-01-10T12:14:05.176810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:05.177467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:05.177532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:05.177630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:05.178304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:05.180239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2026-01-10T12:14:05.181132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2026-01-10T12:14:05.181191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2026-01-10T12:14:05.182275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2026-01-10T12:14:05.182371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2026-01-10T12:14:05.182425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2147:3930] TestWaitNotification: OK eventTxId 139 2026-01-10T12:14:05.187594Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:05.187858Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 3.75ms result status StatusSuccess 2026-01-10T12:14:05.188339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 4 MaxPaths: 5 MaxChildrenInDir: 4 MaxAclBytesSize: 25 MaxTableColumns: 3 MaxTableColumnNameLength: 10 MaxTableKeyColumns: 1 MaxTableIndices: 20 MaxShards: 6 MaxShardsInPath: 4 MaxConsistentCopyTargets: 1 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 20 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:55.693632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:55.693751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.693806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:55.693864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:55.693903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:55.693936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:55.693993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:55.694079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:55.694992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:55.695412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:55.815651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:55.815715Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:55.851203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:55.851324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:55.851541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:55.869579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:55.869865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:55.870612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:55.870858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:55.873361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.873550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:55.874898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:55.874968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:55.875042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:55.875091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:55.875137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:55.875384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
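
The DiskSpaceUsage dump that starts above repeats the same bring-up sequence as the earlier tests (BackgroundCompactionQueue / BorrowedCompactionQueue configuration, TxInitSchema, TTxInitRoot, TTxServerlessStorageBilling), so per-test error blocks are easiest to compare after splitting the stream on its section headers. A hypothetical splitter is sketched below (SECTION_RE and split_test_errors are illustrative names, not part of ya or the YDB repository); it assumes only the header shape visible in this output, `------- [TM] {asan, default-linux-x86_64, release} <path> >> <Suite::Test> [STATUS] Test command err:`, and each returned block includes whatever progress lines were interleaved before the next header.

```python
import re

# Hypothetical splitter for ya unittest output; assumes only the header shape seen above.
SECTION_RE = re.compile(
    r"------- \[TM\] \{[^}]*\} (?P<path>\S+) >> (?P<test>\S+) \[(?P<status>[A-Z]+)\] "
    r"Test command err:"
)

def split_test_errors(raw: str):
    """Return {test name: (status, block text)} for every 'Test command err:' section."""
    sections = {}
    matches = list(SECTION_RE.finditer(raw))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(raw)
        sections[m.group("test")] = (m.group("status"), raw[m.start():end])
    return sections
```

For this section that would yield entries such as `TSchemeShardSubDomainTest::CreateDropSolomon`, `TSchemeShardSubDomainTest::SchemeLimitsRejects` and `TSchemeShardSubDomainTest::DiskSpaceUsage`, each mapped to its [GOOD] status and its own error block for side-by-side diffing.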
2026-01-10T12:13:55.883152Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:13:56.049079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:13:56.049348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.049578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:13:56.049628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:13:56.049978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:13:56.050089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:56.057181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:56.057500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:13:56.057713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.057782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:13:56.057833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:13:56.057871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:13:56.065185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.065262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:13:56.065309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:13:56.068031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.068153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:13:56.068222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:56.068288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:13:56.091210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:13:56.093577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:13:56.093785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:13:56.094911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:56.095048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:13:56.095105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:56.095415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:13:56.095484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:13:56.095639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:13:56.095711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:13:56.098367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:56.098416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
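Note: the trace above walks a single schemeshard operation through its internal states (2 -> 3 -> 128 -> 240 for the ESchemeOpAlterSubDomain transaction, from TCreateParts through TConfigureParts and TPropose to completion). When triaging a failed run it can help to pull these transitions out of the saved log. Below is a minimal Python sketch that does so; it is illustrative only, relies on nothing beyond the "Change state for txid" wording visible in this output, and the function and variable names are invented, not part of ya or the test framework.

import re
from collections import defaultdict

# Matches schemeshard state-transition lines such as:
#   ... schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3
STATE_RE = re.compile(r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)")

def collect_transitions(log_text):
    """Group 'Change state' transitions by operation id (txId:partId)."""
    transitions = defaultdict(list)
    for m in STATE_RE.finditer(log_text):
        transitions[m.group("op")].append((int(m.group("src")), int(m.group("dst"))))
    return transitions

if __name__ == "__main__":
    sample = ("Change state for txid 1:0 2 -> 3 ... "
              "Change state for txid 1:0 3 -> 128 ... "
              "Change state for txid 1:0 128 -> 240")
    for op, steps in collect_transitions(sample).items():
        chain = " -> ".join([str(steps[0][0])] + [str(dst) for _, dst in steps])
        print(op, chain)  # prints: 1:0 2 -> 3 -> 128 -> 240

Run over a full captured log, this prints one state chain per operation id, which makes it easier to spot which suboperation stalled before reaching its final state.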
TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 Leader for TabletID 72057594046678944 is [2:1101:3022] sender: [2:1176:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 2026-01-10T12:14:06.032408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.2206 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 2026-01-10T12:14:06.068812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 0 2026-01-10T12:14:06.069072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:06.069160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:06.069296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 
72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 2026-01-10T12:14:06.080407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 2026-01-10T12:14:06.103672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 2026-01-10T12:14:06.116159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 
rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 1 2026-01-10T12:14:06.116350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:06.116433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:06.116552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-10T12:14:06.116607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-10T12:14:06.116640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:06.116680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF469DEDD00, stats written 3 2026-01-10T12:14:06.117314Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:06.117522Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 256us result status StatusSuccess 2026-01-10T12:14:06.118090Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:05.892016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:05.892123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.892192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:05.892250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:05.892326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:05.892365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:05.892443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-10T12:14:05.892540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:05.893491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:05.893858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:05.996322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:05.996400Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:06.015579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:06.015684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:06.015885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:06.024964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:06.025258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:06.026107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:06.026357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:06.028768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.030961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:06.032295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:06.032356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.032428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:06.032480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:06.032518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:06.032748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.042082Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:06.189369Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:06.189628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.189822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:06.189896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:06.190135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:06.190208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:06.196253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:06.196519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:06.196765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.196827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:06.196880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:06.196918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:06.199282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.199350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:06.199399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:06.205118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.205186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-10T12:14:06.205240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:06.205316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:06.212825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:06.217399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:06.217631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:06.218796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:06.218943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:06.219003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:06.219328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:06.219393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:06.219551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:06.219621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:06.221364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:06.221399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
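Note: interleaved with these unit-test traces, the runner prints per-test verdicts of the form ">> Suite::Test [GOOD]" (or "[FAIL]", as with SchemeReqAdminAccessInTenant above). A quick way to tally them from a saved log is sketched below; this is illustrative only and depends on nothing beyond the ">> Name [STATUS]" pattern visible in this output.

import re
import sys
from collections import Counter

# Matches per-test verdict markers such as:
#   >> TSchemeShardSubDomainTest::Restart [GOOD]
#   >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL]
RESULT_RE = re.compile(r">> (?P<name>[\w:.-]+) \[(?P<status>[A-Z]+)\]")

def summarize(log_text):
    """Count the [GOOD]/[FAIL]/... verdicts reported in a ya make log."""
    return Counter(m.group("status") for m in RESULT_RE.finditer(log_text))

if __name__ == "__main__":
    for status, count in summarize(sys.stdin.read()).most_common():
        print(f"{status:6} {count}")

Feeding the captured log to this script on stdin gives a rough pass/fail breakdown without scrolling through the interleaved traces.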
G: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:06.398108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.398218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.398464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2026-01-10T12:14:06.398816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.398912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-10T12:14:06.399257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.399355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.399488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:06.399549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:06.399596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:06.399623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:06.399728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.399818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.400027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-10T12:14:06.400476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.400638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read 
records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.401992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.402945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.403010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.403084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.408159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:06.411311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:06.411388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.411596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:06.411665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:06.411731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:06.411886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:467:2420] sender: [1:527:2058] recipient: [1:15:2062] 2026-01-10T12:14:06.479451Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:06.479704Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 277us result status StatusSuccess 2026-01-10T12:14:06.480260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:06.481052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:06.481278Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 237us result status StatusSuccess 2026-01-10T12:14:06.481716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:13:39.861042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:13:39.861132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:39.861175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:13:39.861214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:13:39.861267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:13:39.861300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:13:39.861358Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:13:39.861426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:13:39.862316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:13:39.862650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:13:40.034744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:13:40.034802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:40.073587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:13:40.073715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:13:40.073902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:13:40.087065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:13:40.087310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:13:40.088337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:13:40.088573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:13:40.091112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:40.091332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:13:40.092605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:13:40.092674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:13:40.092749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:13:40.092798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:13:40.092847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:13:40.093102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:13:40.277192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.278938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:13:40.279372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
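Note: the DescribeScheme dumps earlier in this output expose the counters that quota-oriented tests such as TSchemeShardExtSubDomainTest::SchemeQuotas and the DiskSpaceUsage tests assert against: PathsInside/PathsLimit, ShardsInside/ShardsLimit, alongside the SchemeLimits block. A minimal sketch for extracting those counters from the text-format dump shown in this log follows; the helper name is invented for illustration and later occurrences of a key simply overwrite earlier ones.

import re

# Pulls quota-related counters out of a text-format DescribeScheme dump, e.g.
#   "PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000"
PAIR_RE = re.compile(r"\b(PathsInside|PathsLimit|ShardsInside|ShardsLimit): (\d+)")

def quota_usage(describe_dump):
    """Return the quota counters found in a DomainDescription dump (last occurrence wins)."""
    return {key: int(value) for key, value in PAIR_RE.findall(describe_dump)}

if __name__ == "__main__":
    sample = "PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000"
    counters = quota_usage(sample)
    print("paths :", counters["PathsInside"], "/", counters["PathsLimit"])
    print("shards:", counters["ShardsInside"], "/", counters["ShardsLimit"])

Comparing the *Inside counters against the matching *Limit values for each DescribeScheme result is a quick way to see how close a test subdomain is to the limits it is exercising.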
6 2026-01-10T12:14:06.086059Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1173:3078], at schemeshard: 72075186233409546, txId: 116, path id: 1 2026-01-10T12:14:06.086131Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1173:3078], at schemeshard: 72075186233409546, txId: 116, path id: 39 2026-01-10T12:14:06.086956Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-10T12:14:06.087049Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2026-01-10T12:14:06.087473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-10T12:14:06.088316Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-10T12:14:06.088490Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-10T12:14:06.088557Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-10T12:14:06.088626Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 19 2026-01-10T12:14:06.088708Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 14 2026-01-10T12:14:06.089373Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-10T12:14:06.089464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-10T12:14:06.089498Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-10T12:14:06.089541Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 39], version: 1 2026-01-10T12:14:06.089586Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 39] was 4 2026-01-10T12:14:06.089682Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2026-01-10T12:14:06.094647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2026-01-10T12:14:06.094900Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2026-01-10T12:14:06.094974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-10T12:14:06.099410Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-10T12:14:06.099796Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2026-01-10T12:14:06.100024Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-10T12:14:06.100122Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-10T12:14:06.100336Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-10T12:14:06.100427Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2026-01-10T12:14:06.100557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-10T12:14:06.100728Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 116:0 2 -> 3 2026-01-10T12:14:06.104218Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 
116 2026-01-10T12:14:06.104526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2026-01-10T12:14:06.108654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-10T12:14:06.109188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-10T12:14:06.109289Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2026-01-10T12:14:06.109434Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2026-01-10T12:14:06.110077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1120 RawX2: 30064774108 } TxBody: "\n\342\004\n\007Table11\020\'\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\':\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2026-01-10T12:14:06.120988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2026-01-10T12:14:06.121264Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 
72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2026-01-10T12:14:06.194473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2026-01-10T12:14:06.200112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2026-01-10T12:14:06.200593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:05.867223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:05.867314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.867363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:05.867422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:05.867477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:05.867512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:05.867605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.867695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:14:05.868606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:05.868935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:05.962299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:05.962366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:05.978242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:05.978335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:05.978528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:05.986792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:05.987045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:05.987764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.987967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:05.990184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.990363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:05.991638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:05.991695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.991763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:05.991806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:05.991844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:05.992071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.148869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.150964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.151046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.151148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.151255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.151343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.151444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:06.960047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:06.960182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.960228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:06.960344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-10T12:14:06.960632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.960676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:06.960819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:06.960871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:06.960918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:06.960954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:06.960998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:14:06.961043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:06.961107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:06.961145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:06.961225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:14:06.961266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-10T12:14:06.961304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:14:06.961350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:14:06.962224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:06.962349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:06.962389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:06.962431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:14:06.962477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:06.963209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:06.963292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:06.963320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:06.963351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:14:06.963398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:06.963469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-10T12:14:06.970591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:06.971830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-10T12:14:06.972187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:06.972244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-10T12:14:06.972355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:06.972394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 101 2026-01-10T12:14:06.972886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:06.973047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:06.973096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:06.973140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:722:2712] 2026-01-10T12:14:06.973321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:06.973352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:722:2712] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:06.973794Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:06.974032Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusSuccess 2026-01-10T12:14:06.974607Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:06.975123Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:06.975333Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 192us result status StatusPathDoesNotExist 2026-01-10T12:14:06.975497Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:03.836249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:03.836350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:03.836395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:03.836442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:03.836487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:03.836517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:03.836575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:03.836664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:03.837680Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:03.838051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:03.940695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:03.940756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:03.972568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:03.977102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:03.978875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:03.997007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:03.997242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:03.998240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:03.998710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:04.003588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:04.003824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:04.005209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:04.005268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:04.005386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:04.005443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:04.005493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:04.005713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:04.188159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.189925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:04.190410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:07.073320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:07.073363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:07.073852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:14:07.074980Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-10T12:14:07.076789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.077127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:14:07.077875Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:14:07.078634Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:07.078748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:07.081691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409548 2026-01-10T12:14:07.082882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:07.083095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409547 2026-01-10T12:14:07.086417Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting 
tablet 72075186233409549 2026-01-10T12:14:07.086714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:07.086941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:14:07.089889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.089956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:14:07.090065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:07.091239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.091298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:07.091451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:14:07.091839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:14:07.092257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:14:07.092531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:07.092580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:07.095914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:07.095969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:07.096062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:07.096109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:07.096178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:07.096216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:07.096443Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 
paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:07.096569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:07.096641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.096683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:07.096768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:07.099544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:14:07.101992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:14:07.102049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:14:07.102494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:14:07.102605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:14:07.102646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1065:3017] TestWaitNotification: OK eventTxId 105 2026-01-10T12:14:07.104045Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:07.104294Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 298us result status StatusPathDoesNotExist 2026-01-10T12:14:07.104487Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:07.105108Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:07.105275Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist 2026-01-10T12:14:07.105407Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D83258A8100 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:02.741099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:02.741193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:02.741241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:02.741291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:02.741352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:02.741416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:02.741485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:02.741574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:02.742475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:02.742811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:02.853425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:02.853513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:02.875339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:02.875471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:02.875633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:02.888649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:02.888897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:02.889712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:02.889950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:02.892846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.893079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:02.894430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:02.894497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:02.894606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:02.894673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:02.894724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:02.895010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:03.118377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.119528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.119694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.119781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.119868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.119987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:14:03.120259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:03.120830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 Leader for TabletID 72057594046678944 is [1:902:2851] sender: [1:976:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 2026-01-10T12:14:07.584150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 2026-01-10T12:14:07.584308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 0 2026-01-10T12:14:07.584439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:07.584499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:07.584900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:14:07.584971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:07.585035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:07.585135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D832591C500, stats written 2 2026-01-10T12:14:07.585837Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:07.588851Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 277us result status StatusSuccess 2026-01-10T12:14:07.589368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:12:17.419987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:12:17.420129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:17.420178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:12:17.420220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:12:17.420265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:12:17.420314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:12:17.420403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:12:17.420487Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:12:17.421443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:12:17.421797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:12:17.521670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:12:17.521749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:17.536312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:12:17.536432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:12:17.536608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:12:17.548881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:12:17.549128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:12:17.549758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:12:17.549955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:12:17.551885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:17.552012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:12:17.553029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:12:17.553075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:12:17.553190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:12:17.553237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:12:17.553279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:12:17.553571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:12:17.679568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.680533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.680670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.680787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.680886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.680952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:12:17.681622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 2 2026-01-10T12:14:04.402454Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:2 2026-01-10T12:14:04.402476Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:2 2026-01-10T12:14:04.402531Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 59] was 1 2026-01-10T12:14:04.403042Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [24:128:2152], Recipient [24:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:14:04.403087Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:14:04.403137Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:04.403166Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 59], at schemeshard: 72057594046678944 2026-01-10T12:14:04.403214Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 1 2026-01-10T12:14:04.403241Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 57], at schemeshard: 72057594046678944 2026-01-10T12:14:04.403269Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 7 2026-01-10T12:14:04.406784Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:14:04.406879Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [24:3101:4570] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710759 at schemeshard: 72057594046678944 2026-01-10T12:14:04.407190Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435106, Sender [24:3101:4570], Recipient [24:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-10T12:14:04.407235Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5475: StateWork, processing event TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-10T12:14:04.407347Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:14:04.407713Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:04.409153Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:14:04.421597Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, 
received event# 269877764, Sender [24:3168:4634], Recipient [24:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:14:04.421652Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:14:04.421678Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-10T12:14:04.744192Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [24:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:14:04.744268Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:14:04.744558Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [24:128:2152], Recipient [24:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:14:04.744592Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:14:04.823895Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [24:3188:4654], Recipient [24:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-10T12:14:04.823984Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:14:04.824553Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:14:04.824883Z node 24 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 327us result status StatusSuccess 2026-01-10T12:14:04.825956Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 11 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 11 IsBackup: false CdcStreams { Name: "000000000000000A_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 46 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "09700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 48 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "19700101000019Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 61 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 50 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 56 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:06.325361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:06.325468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:06.325509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:06.325561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:06.325599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:06.325621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:06.325664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:06.325725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:06.326449Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:06.326755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:06.423099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:06.423170Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:06.440219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:06.440318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:06.440515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:06.450144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:06.450478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:06.451209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:06.451424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:06.454963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.455139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:06.456432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:06.456492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:06.456559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:06.456604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:06.456645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:06.456936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:06.662884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.664966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:06.665582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26-01-10T12:14:07.761002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-10T12:14:07.761058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-01-10T12:14:07.763236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.763309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.763498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:07.763650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.763706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:07.763757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-10T12:14:07.763834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.763918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:07.764045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:07.764110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:07.764154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:07.764191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:07.764236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:14:07.764308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:07.764372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:07.764412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:07.764696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-10T12:14:07.764756Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-01-10T12:14:07.764812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:14:07.764862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:14:07.768377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:07.768526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:07.768574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:07.768652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:14:07.768709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:07.769778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:07.769873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:07.769903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:07.769941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:14:07.770011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-10T12:14:07.770098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-01-10T12:14:07.770151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:968:2885] 2026-01-10T12:14:07.774612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:07.779710Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:07.779860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:07.779899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:969:2886] TestWaitNotification: OK eventTxId 100 2026-01-10T12:14:07.780473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:07.780711Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 278us result status StatusSuccess 2026-01-10T12:14:07.781234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:14:07.786288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:07.786515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, 
pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.786673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:14:07.789540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:07.789921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:07.090695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:07.090805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.090855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:07.090901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:07.090962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:07.091000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:07.091079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.091160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:07.092029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:07.092383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:07.198581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:07.198678Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:07.213859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:07.213967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:07.214190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:07.225780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:07.226035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:07.226741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.226986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:07.232496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.232714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:07.233916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.233983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.234053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:07.234116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.234164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:07.234457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.242197Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:07.371757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:07.371941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.372181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:07.372222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:07.372390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:07.372496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:07.379733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.379938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:07.380169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.380227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:07.380287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:07.380327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:07.382003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.382046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:07.382076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:07.383727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.383766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.383799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2026-01-10T12:14:07.383848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:07.386841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:07.388528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:07.388732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:07.389678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.389776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:07.389828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:07.390082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:07.390132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:07.390269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:07.390347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:07.391741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.391781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
eTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2026-01-10T12:14:07.911458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-10T12:14:07.911911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-10T12:14:07.913015Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:14:07.914289Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2026-01-10T12:14:07.915473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.915753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:07.916693Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:07.917336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:07.917578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2026-01-10T12:14:07.918897Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:14:07.919020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:07.919171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-10T12:14:07.921152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:07.921383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-10T12:14:07.924853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:14:07.925156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.925229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:14:07.925324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:07.925958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:14:07.926309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.926360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:07.926546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:07.926955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:14:07.927014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:14:07.933427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:07.933506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:07.933608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:07.933633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:07.933971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:07.933999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:07.934047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:07.934116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:07.935117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:07.935221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:07.935276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:07.935377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:07.935635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:07.937028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:14:07.937325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:14:07.937388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:14:07.938016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:14:07.938133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:14:07.938201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:796:2698] TestWaitNotification: OK eventTxId 106 2026-01-10T12:14:07.939048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:07.939282Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 262us result status StatusSuccess 2026-01-10T12:14:07.939747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> TSchemeShardSubDomainTest::DeclareAndDelete >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly >> 
TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] >> Compression::WriteGZIP [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> GenericFederatedQuery::ClickHouseSelectCount >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> Compression::WriteZSTD >> GenericFederatedQuery::PostgreSQLSelectCount >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:07.097684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:07.097813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.097860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:07.097912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:07.097982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:07.098024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:07.098110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.098219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:07.099237Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:07.099626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:07.196771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:07.196850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:07.213951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:07.214047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:07.214197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:07.229738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:07.229979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:07.230683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.230886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:07.233133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.233312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:07.234464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.234518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.234580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:07.234634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.234685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:07.234943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.386137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.390962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.391577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
-10T12:14:11.672827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:14:11.672888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:11.673085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-10T12:14:11.673287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:11.673346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-10T12:14:11.673401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-10T12:14:11.673866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:11.673933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-10T12:14:11.674026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:11.674105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-10T12:14:11.674170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-10T12:14:11.675183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:11.675314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:11.675368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:11.675416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-01-10T12:14:11.675461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-10T12:14:11.676771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:11.676865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:11.676895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:11.676930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-10T12:14:11.676963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-10T12:14:11.677041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:14:11.680454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:11.680526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-10T12:14:11.680906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-10T12:14:11.681110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:11.681154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.681192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:11.681243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.681281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:14:11.681362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1003:2944] message: TxId: 104 2026-01-10T12:14:11.681407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.681446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:14:11.681485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:14:11.681594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-10T12:14:11.682172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2026-01-10T12:14:11.682214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:11.682928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:11.683320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:11.684728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:11.684792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-10T12:14:11.684896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:11.684948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1426:3345] 2026-01-10T12:14:11.685865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:11.687659Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-10T12:14:11.687911Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 274us result status StatusSuccess 2026-01-10T12:14:11.688557Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D2BAB4A8100 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:07.468988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:07.469084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.469127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:07.469174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:07.469238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:07.469293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:07.469375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.469476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:14:07.470367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:07.470703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:07.539323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:07.539394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:07.551536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:07.551638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:07.551804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:07.559576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:07.559792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:07.560606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.560808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:07.563094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.563286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:07.564466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.564525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.564603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:07.564647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.564685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:07.564984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.714312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.715930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.716642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
aitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST
waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 Leader for TabletID 72057594046678944 is [1:899:2848] sender: [1:972:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 2026-01-10T12:14:12.039706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 0 2026-01-10T12:14:12.039894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:12.039958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:12.040059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 2026-01-10T12:14:12.050561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 2026-01-10T12:14:12.061605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 1 2026-01-10T12:14:12.061772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:12.061843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:12.061941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D2BAB5DB900, stats written 2 2026-01-10T12:14:12.062475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:12.062740Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 292us result status StatusSuccess 2026-01-10T12:14:12.063289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:10.283642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:10.283747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.283791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:10.283835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:10.283911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:10.283950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:10.284019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.284149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:10.285086Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:10.285449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:10.375724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:10.375819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:10.398119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:10.398234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:10.398449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:10.407763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:10.408022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:10.410202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.410504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:10.413130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:10.413318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:10.414582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.414642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:10.414726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:10.414801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:10.414843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:10.415088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.423931Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:10.554496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-10T12:14:10.554750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.554953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:10.554998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:10.555224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:10.555289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:10.559379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.559624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:10.559831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.559889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:10.559926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:10.559951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:10.565713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.565792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:10.565835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:10.569343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.569409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.569451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.569512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-10T12:14:10.574087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:10.577503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:10.577717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:10.578849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.578995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:10.579051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.579366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:10.579420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.579599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:10.579675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:10.587000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.587052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
lt> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2026-01-10T12:14:11.290682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:654: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2026-01-10T12:14:11.290731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-10T12:14:11.290762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2026-01-10T12:14:11.306442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.306659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.353969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2026-01-10T12:14:11.354101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-10T12:14:11.354161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-10T12:14:11.354205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.354237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2026-01-10T12:14:11.354279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:14:11.354507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2026-01-10T12:14:11.354624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-10T12:14:11.354675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 
2026-01-10T12:14:11.354712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.354770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:14:11.354963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:14:11.355101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:11.355188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:14:11.359199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.359467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.359707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.359744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.359864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2026-01-10T12:14:11.360037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.360062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2317], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-10T12:14:11.360181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2317], at schemeshard: 72057594046678944, txId: 104, path id: 3 2026-01-10T12:14:11.360665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.360720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:14:11.360818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:11.360866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.360919Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:11.360954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.361006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:14:11.361047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:11.361083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:14:11.361113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:14:11.361305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2026-01-10T12:14:11.361349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:14:11.361383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-10T12:14:11.361408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-10T12:14:11.362022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:11.362091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:11.362118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:11.362148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:14:11.362183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:11.362593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:11.362651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:14:11.362686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:14:11.362712Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-10T12:14:11.362733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2026-01-10T12:14:11.362772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:14:11.367071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:14:11.367263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.198250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.198353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.198399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.198456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.198505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.198545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.198602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.198700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.199422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.199719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.282694Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.282767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.300299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.300440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.300673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.310660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.310983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.311921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.312220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.315223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.315432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.316953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.317033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.317120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.317178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.317227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.317505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.326269Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:11.464449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:11.464694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.464917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:11.464992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:11.465247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:11.465334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:11.468240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.468469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:11.468739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.468810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:11.468869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:11.468914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:11.471435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.471515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:11.471577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:11.473825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.473887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.473953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:11.474013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:11.477776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:11.479433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:11.479624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:11.480817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.480958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:11.481018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:11.481315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:11.481375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:11.481578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:11.481671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:11.486237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.486296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
7594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-10T12:14:11.919215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-10T12:14:11.919293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-10T12:14:11.920485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 625 RawX2: 4294969835 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:14:11.920552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2026-01-10T12:14:11.920702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 625 RawX2: 4294969835 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:14:11.920780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:14:11.920882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 625 RawX2: 4294969835 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:14:11.920978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.921021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.921084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2026-01-10T12:14:11.921150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:14:11.925079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.925241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.926571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.926689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.926982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.927037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:14:11.927161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:11.927205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.927253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:11.927289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.927331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:14:11.927398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:280:2270] message: TxId: 101 2026-01-10T12:14:11.927453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.927512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:14:11.927561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:14:11.927697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-10T12:14:11.929572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:11.929618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:281:2271] TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:11.930185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.930453Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 237us result status StatusSuccess 2026-01-10T12:14:11.931051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: 
EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:11.931727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.931952Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 247us result status StatusSuccess 2026-01-10T12:14:11.932533Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.689717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.689808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.689845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.689888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.689944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.689977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.690034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.690121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.690969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.691284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.785361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.785436Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.803929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.804041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.804299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.814402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.814663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.815437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.815639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.818150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.818343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.819564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.819626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.819697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.819740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.819791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.820003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.827530Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:11.996698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:11.996937Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.997133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:11.997187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:11.997389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:11.997453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:12.000163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:12.000367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:12.000537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.000591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:12.000665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:12.000719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:12.003278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.003340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:12.003386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:12.005302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.005384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.005457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:12.005516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:12.008589Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:12.010581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:12.010879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:12.012116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:12.012270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:12.012333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:12.012711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:12.012782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:12.013016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:12.013080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:12.015100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:12.015151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
blishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:14:12.273480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:12.273518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2317], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:12.273563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2317], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-10T12:14:12.273990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.274033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:12.274132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:12.274166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:12.274207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:12.274241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:12.274301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:14:12.274409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:12.274450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:12.274498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:12.274645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:14:12.274688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-10T12:14:12.274723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-10T12:14:12.274754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-10T12:14:12.275300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:12.275376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 
1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:12.275448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:12.275506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-10T12:14:12.275552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:12.276293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:12.276375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:12.276404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:12.276433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-10T12:14:12.276486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:12.276571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-10T12:14:12.279590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:12.280652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-10T12:14:12.280902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:12.280956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-10T12:14:12.281365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:12.281449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:12.281499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:487:2436] TestWaitNotification: OK eventTxId 100 2026-01-10T12:14:12.281983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:12.282251Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 294us result status StatusSuccess 2026-01-10T12:14:12.282704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:12.283194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:12.283356Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2026-01-10T12:14:12.283773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:07.073407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:07.073471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.073498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:07.073538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:07.073598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:07.073628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:07.073684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.073765Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:07.074452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:07.074678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:07.164846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:07.164916Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:07.183171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:07.183281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:07.183469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:07.191684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:07.191938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:07.192740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.192966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:07.195161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.195359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:07.196590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.196656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.196722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:07.196770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.196807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:07.197082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.343881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.344974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.345272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
lete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.765011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.765315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.765365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:11.765550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:14:11.765724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.765770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:14:11.765892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:14:11.766414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.766488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:14:11.766571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.766629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:14:11.766675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-10T12:14:11.767621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:11.767740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:11.767785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:11.767825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-10T12:14:11.767870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:11.768813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:11.768889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:11.768918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:11.768945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:14:11.768971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:14:11.769018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:14:11.772025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.772096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:11.772573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:14:11.772762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:11.772807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:11.772837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:11.772866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:11.772952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:14:11.773023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:814:2781] message: TxId: 103 2026-01-10T12:14:11.773093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:11.773134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:14:11.773185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:14:11.773273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:14:11.773861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.773915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:11.774997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:11.775448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:11.776709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.776781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-10T12:14:11.776878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:11.776930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1064:2997] 2026-01-10T12:14:11.777527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:11.778175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.778359Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 174us result status StatusSuccess 2026-01-10T12:14:11.778730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: 
"quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2026-01-10T12:13:37.692730Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704975631321155:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:37.692928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:37.813296Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704977466193221:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:37.813349Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:37.887469Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:37.922134Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:37.931562Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:38.456656Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.505098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:38.584865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.584978Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.593801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:38.593883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:38.658596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.681651Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:38.723081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:38.739116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:38.745606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704975631320981:2082] 1768047217599405 != 1768047217599408 2026-01-10T12:13:38.765815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.766352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:38.766445Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:38.861429Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:39.015378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003644/r3tmp/yandex8CQS57.tmp 2026-01-10T12:13:39.015412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003644/r3tmp/yandex8CQS57.tmp 2026-01-10T12:13:39.015574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003644/r3tmp/yandex8CQS57.tmp 2026-01-10T12:13:39.015706Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:39.118372Z INFO: TTestServer started on Port 25929 GrpcPort 30679 PQClient connected to localhost:30679 2026-01-10T12:13:39.789481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:40.062344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:42.680503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704975631321155:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:42.680578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:42.817089Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704977466193221:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:42.817172Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:44.405748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705005696093560:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:44.410473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:44.411273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705005696093573:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:44.411334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705005696093574:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:44.411373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:44.417952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:13:44.460035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705005696093577:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:13:44.549537Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705005696093666:3090] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:13:44.926896Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705005696093676:2347], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:44.928522Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGU5ZDYzOTgtYjhhZTNlM2YtN2VkNmU3OWItZmY2ODVjZmQ=, ActorId: [1:7593705005696093540:2334], ActorState: ExecuteState, LegacyTraceId: 01kekx79jhc1y1cc85fyz3z3ks, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:44.930819Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:13:44.932062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.060350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:45.296442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part ... 
cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:09.384432Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:09.384477Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384493Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.384515Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384531Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:09.384539Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-10T12:14:09.384549Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384562Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.384578Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384587Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:09.384600Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:09.384611Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384621Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.384635Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384635Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:09.384643Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-10T12:14:09.384644Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384652Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.384690Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.384703Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-10T12:14:09.419350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:09.419350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:09.419375Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419381Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.419393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.419413Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419414Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-10T12:14:09.419425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-10T12:14:09.419478Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:09.419488Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.419508Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.419516Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-10T12:14:09.425008Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593705059084067962:2118], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:14:09.425138Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593705059084067962:2118], cacheItem# { Subscriber: { Subscriber: [3:7593705076263938412:2990] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:14:09.425293Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593705114918646625:4859], recipient# [3:7593705114918646624:2552], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:14:09.449933Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:09.449943Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:09.449966Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.449978Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.449982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.449993Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.450000Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.450013Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:09.450018Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.450031Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:09.485263Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:09.485299Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485312Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.485340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:09.485353Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-10T12:14:09.485366Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485380Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.485400Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:09.485402Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485412Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-10T12:14:09.485416Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:09.485421Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.485434Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-10T12:14:09.485479Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:09.485489Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485498Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:09.485510Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:09.485519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:13:23.642962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:23.643160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:23.775535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:23.777787Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:13:23.785914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:23.786380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:23.786491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:23.788487Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:13:23.788841Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:23.788885Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:13:24.347765Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:24.614703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:24.614829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:24.615801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:24.615904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:24.674091Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:24.674991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:24.675397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:24.846827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:24.892067Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:13:25.819769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:25.903289Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:25.903470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:26.277890Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:224:2183] Handle TEvProposeTransaction 2026-01-10T12:13:26.277982Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:224:2183] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:13:26.278122Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:224:2183] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1565:3022] 2026-01-10T12:13:26.492799Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1565:3022] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:13:26.492902Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1565:3022] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:13:26.493704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1565:3022] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:13:26.493829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1565:3022] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:13:26.494347Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1565:3022] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:13:26.494504Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1565:3022] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:13:26.494658Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1565:3022] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:13:26.505423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:26.506098Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1565:3022] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:13:26.511913Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1565:3022] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:13:26.511995Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1565:3022] txid# 281474976710657 SEND to# [1:1493:2980] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:13:26.652610Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1644:3081] 2026-01-10T12:13:26.652959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:13:26.761491Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1646:3082] 2026-01-10T12:13:26.761776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:13:26.799276Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1650:3085] 2026-01-10T12:13:26.799996Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:13:26.813601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:13:26.814101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:13:26.815938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:13:26.816043Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:13:26.816151Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:13:26.816575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:13:26.816715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:13:26.817036Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:13:26.817168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:1752:3081] in generation 1 2026-01-10T12:13:26.817372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:13:26.818929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-10T12:13:26.818988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-10T12:13:26.819032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-10T12:13:26.819333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:13:26.819718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:13:26.819801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [1:1753:3082] in generation 1 2026-01-10T12:13:26.838984Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:13:26.839499Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:13:26.841375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2026-01-10T12:13:26.841448Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037892 2026-01-10T12:13:26.841504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037892 2026-01-10T12:13:26.841775Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:13:26.842190Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:13:26.842256Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037892 persisting started state actor id [1:1779:3085] in generation 1 2026-01-10T12:13:26.864950Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1739:2409] 2026-01-10T12:13:26.865541Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-0 ... : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2026-01-10T12:14:10.198838Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [5:2181:3386], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01kekx7z85ff6xahtfce804fwb. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:14:10.198966Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710665;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:14:10.202964Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710665, taskId: 4. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:14:10.203303Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.511488s 2026-01-10T12:14:10.203387Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.511488s 2026-01-10T12:14:10.203710Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2179:3384] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 251817 Tasks { TaskId: 1 CpuTimeUs: 249869 Tables { TablePath: "/Root/table-1" ReadRows: 54 ReadBytes: 432 } ComputeCpuTimeUs: 58 BuildCpuTimeUs: 249811 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 5 StartTimeMs: 1768047250191 CreateTimeMs: 1768047248312 UpdateTimeMs: 1768047250191 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:14:10.203779Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2181:3386], CA [5:2179:3384], CA [6:2177:2515], CA [5:2180:3385], trace_id# 2026-01-10T12:14:10.204118Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2179:3384] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 252860 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 249943 FinishTimeMs: 1768047250193 Tables { TablePath: "/Root/table-1" ReadRows: 54 ReadBytes: 432 } ComputeCpuTimeUs: 132 BuildCpuTimeUs: 249811 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 5 StartTimeMs: 1768047250191 CreateTimeMs: 1768047248312 UpdateTimeMs: 1768047250193 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:14:10.204195Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2179:3384] 2026-01-10T12:14:10.204264Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [5:2181:3386], CA [6:2177:2515], CA [5:2180:3385], trace_id# 2026-01-10T12:14:10.204379Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:14:10.204416Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:14:10.204451Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:14:10.204502Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:14:10.205903Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2180:3385] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 91165 DurationUs: 13000 Tasks { TaskId: 3 StageId: 1 CpuTimeUs: 87612 FinishTimeMs: 1768047250196 InputRows: 2 InputBytes: 14 ComputeCpuTimeUs: 200 BuildCpuTimeUs: 87412 HostName: "ghrun-tjszgfszoq" NodeId: 5 StartTimeMs: 1768047250183 CreateTimeMs: 1768047248563 UpdateTimeMs: 1768047250196 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:14:10.205968Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2180:3385] 2026-01-10T12:14:10.206020Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2181:3386], CA [6:2177:2515], trace_id# 2026-01-10T12:14:10.206635Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [5:1933:3246] SeqNo# 1 Rows# 1 trace_id# 2026-01-10T12:14:10.207916Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2181:3386] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 545360 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 543767 FinishTimeMs: 1768047250198 InputRows: 1 InputBytes: 7 ComputeCpuTimeUs: 111 BuildCpuTimeUs: 543656 HostName: "ghrun-tjszgfszoq" NodeId: 5 StartTimeMs: 1768047250198 CreateTimeMs: 1768047248650 UpdateTimeMs: 1768047250198 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:14:10.207963Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. 
Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2181:3386] 2026-01-10T12:14:10.208004Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [6:2177:2515], trace_id# 2026-01-10T12:14:10.208463Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [6:2177:2515] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 336061 DurationUs: 19000 Tasks { TaskId: 2 CpuTimeUs: 332878 FinishTimeMs: 1768047250187 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 46 ReadBytes: 368 } ResultRows: 1 ResultBytes: 7 ComputeCpuTimeUs: 214 BuildCpuTimeUs: 332664 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 6 StartTimeMs: 1768047250168 CreateTimeMs: 1768047247978 UpdateTimeMs: 1768047250187 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:14:10.208518Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [6:2177:2515] 2026-01-10T12:14:10.208615Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:14:10.208683Z node 5 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-10T12:14:10.208745Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [5:2169:3246] TxId: 281474976710665. Ctx: { TraceId: 01kekx7z85ff6xahtfce804fwb, Database: , SessionId: ydb://session/3?node_id=5&id=Njk2N2Q5NC1kNjU5YmVmZi1jNzdhOTk5ZS1kNmQ4N2RlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.008877s ReadRows: 100 ReadBytes: 800 RequestUnits# 100 ForceFlag# true trace_id# ... 
response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 4 VirtualTimestamp { Step: 3500 TxId: 281474976710663 } Finished: true 2026-01-10T12:14:10.211473Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 4000 2026-01-10T12:14:10.211561Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037890: waitStep# 4000 readStep# 4000 observedStep# 4000 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 354 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:08.749494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:08.749627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:08.749693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:08.749749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:08.749798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:08.749840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:08.749911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:08.750024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:08.751026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:08.751402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:08.848721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:08.848795Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:08.877887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:08.878046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:08.878288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:08.914564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:08.914899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:08.915885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:08.916223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:08.924219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:08.924453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:08.925970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:08.926056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:08.926145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:08.926230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:08.926282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:08.926596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.123192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.124987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:09.125468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.338218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:11.338333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 130 2026-01-10T12:14:11.338490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:11.338563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:11.339440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.339927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:14:11.341564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.341600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.341758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:11.341909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.341941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:14:11.341981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:14:11.342230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.342275Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-10T12:14:11.342329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:11.342367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.342408Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:11.342438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.342474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:14:11.342510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:11.342548Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:14:11.342584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:14:11.342690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:14:11.342738Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:14:11.342777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:14:11.342810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:14:11.343466Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.343548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.343582Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:11.343622Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:14:11.343690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:11.344993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.345076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.345113Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:11.345156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:14:11.345191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:11.345282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:14:11.345539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:11.345587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:11.345681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:14:11.346247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:11.346308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:11.346388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:11.348422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.349787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.349882Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:11.349951Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:14:11.350181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:11.350228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:14:11.350583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:11.350670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:11.350708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:751:2740] TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:11.351214Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.351378Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusPathDoesNotExist 2026-01-10T12:14:11.351526Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false [GOOD] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> GenericFederatedQuery::ClickHouseFilterPushdown >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] Test command err: 2026-01-10T12:14:01.183142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705080178095503:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:01.183316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:01.219294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:01.609032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:01.620105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:01.620245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:01.634215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:01.665542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705080178095283:2081] 1768047241141080 != 1768047241141083 2026-01-10T12:14:01.669196Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.795588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:01.880064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:01.892531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:14:02.189078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:02.919374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:05.685585Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705097550301387:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:05.685626Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:05.712741Z node 2 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:05.834922Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:05.843255Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705097550301196:2081] 1768047245677698 != 1768047245677701 2026-01-10T12:14:05.859772Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:05.890219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:05.890328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:05.904471Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:06.056364Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:06.705439Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:06.767395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:07.780011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:07.964521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots >> 
TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::Delete >> TSchemeShardSubDomainTest::LS >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:09.617936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:09.618032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:09.618068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:09.618112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:09.618154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:09.618178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:09.618229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:09.618303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:09.619183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:09.619493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:09.711820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:09.711899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-10T12:14:09.728934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:09.734686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:09.734914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:09.744482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:09.744833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:09.745713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.746141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:09.758260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.758495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:09.760291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:09.760373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.760528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:09.760590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:09.760637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:09.760919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.781002Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:09.971813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:09.972109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.972330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:09.972383Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:09.972605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:09.972679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:09.977666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.977969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:09.978307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.978394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:09.978462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:09.978523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:09.984790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.984875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:09.984920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:09.987445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.987508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.987564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.987626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:09.992789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:09.995867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:09.996167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:09.998004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.998224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:09.998314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.998785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:09.998881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.999150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:09.999318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:10.001802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.001850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
2026-01-10T12:14:11.205144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-01-10T12:14:11.205188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2026-01-10T12:14:11.205232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2026-01-10T12:14:11.206433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.206537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.206586Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:11.206631Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2026-01-10T12:14:11.206684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:11.208172Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.208261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:11.208315Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:11.208353Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-10T12:14:11.208390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2026-01-10T12:14:11.208491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-10T12:14:11.208538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:898:2742] 2026-01-10T12:14:11.211139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2026-01-10T12:14:11.213540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:11.213650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:11.213689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:899:2743] TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:11.214254Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.214506Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 257us result status StatusSuccess 2026-01-10T12:14:11.214993Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:11.215638Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.215828Z node 2 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 215us result status StatusSuccess 2026-01-10T12:14:11.216272Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:11.216895Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:11.217069Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 199us result status StatusSuccess 2026-01-10T12:14:11.217463Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.745176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.745278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.745317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.745358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.745394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.745422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.745471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.745549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.746411Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.746711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.833559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.833600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.844320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.848484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.848674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.854442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.854588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.855145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.855522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.858632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.858796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.859849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.859916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.860008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.860039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.860070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.860282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.866439Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:11.990035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-10T12:14:11.990263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.990406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:11.990439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:11.990604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:11.990661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:11.994749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.994952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:11.995148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.995227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:11.995277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:11.995309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:11.997703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.997761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:11.997814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:11.999793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.999845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.999899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:11.999970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-10T12:14:12.004067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:12.006259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:12.006453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:12.007522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:12.007676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:12.007731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:12.008029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:12.008155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:12.008316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:12.008395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:12.010299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:12.010335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
CE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.221189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-10T12:14:12.221477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.221590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.221733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:12.221789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:12.221823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:14:12.221848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:14:12.221994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.222087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.222307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-10T12:14:12.222689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.222822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.223869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 
0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.224928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.225092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.225159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.225228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.231252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:12.234110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:12.234206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:12.234992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:12.235068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:12.235128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:12.236169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2026-01-10T12:14:12.303378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:12.303449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:458:2411] sender: [1:522:2058] recipient: [1:15:2062] 2026-01-10T12:14:12.304666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:12.304797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:12.304842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:520:2459] TestWaitNotification: OK eventTxId 100 2026-01-10T12:14:12.305354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:12.305595Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 265us result status StatusSuccess 2026-01-10T12:14:12.306147Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:12.306757Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:12.306942Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 192us result status StatusSuccess 2026-01-10T12:14:12.307423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D692D1AA900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:15.542342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:15.542427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:15.542462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:15.542504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:15.542559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:15.542584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:15.542644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:15.542720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:15.543491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:15.543758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:15.630693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:15.630762Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:15.645679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:15.645782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:15.645993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:15.653951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:15.654208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:15.654890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:15.655114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:15.657261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:15.657413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:15.658622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:15.658687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:15.658755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:15.658799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:15.658838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:15.659136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:15.809624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.810692Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.810829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.810900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.810989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:15.811757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 Leader for TabletID 72057594046678944 is [1:899:2848] sender: [1:973:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 2026-01-10T12:14:19.621001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 2026-01-10T12:14:19.621210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 0 2026-01-10T12:14:19.621371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:19.621455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:19.621611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:14:19.621689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:19.621733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:19.621778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D692D29BD00, stats written 2 2026-01-10T12:14:19.622307Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:19.622587Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 307us result status StatusSuccess 2026-01-10T12:14:19.623120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D82BF4A4500 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:07.273703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:07.273795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.273846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:07.273898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:07.273937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:07.273964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:07.274020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:07.274096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:14:07.274948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:07.275285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:07.364145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:07.364207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:07.379005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:07.379089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:07.379284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:07.393004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:07.393251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:07.393988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:07.394199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:07.396376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.396546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:07.397789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:07.397849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:07.397915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:07.397965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:07.398008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:07.398291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:07.530334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.531968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.532038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.532147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.532237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:07.532330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D82BF4B5D00, stats written 0 Leader for TabletID 72057594046678944 is [2:1024:2983] sender: [2:1083:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D82BF4B5D00, stats written 0 2026-01-10T12:14:18.474193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0
TEST waitForFullStatsUpdate, schemeshard 0x00007D82BF4B5D00, stats written 0 2026-01-10T12:14:18.526878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D82BF4B5D00, stats written 0 2026-01-10T12:14:18.527080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:18.527152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-10T12:14:18.527225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-10T12:14:18.527271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D82BF4B5D00, stats written 1 2026-01-10T12:14:18.527800Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:18.528043Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 273us result status StatusSuccess 2026-01-10T12:14:18.528608Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 168 LastUpdateTime: 168 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables |88.6%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D1D356ABD00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.106017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.106091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.106139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.106175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.106206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.106229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.106295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.106396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.107222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.107524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.197283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.197341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.212976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.213098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.213320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.221268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.221546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.222433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.222660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.224894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.225069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.226141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.226213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.226315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.226368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.226413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.226681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.351762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.352600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.352717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.352795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.352915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.352974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.353577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
688676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:14:18.688701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:14:18.688725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:14:18.688749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:14:18.688935Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.689112Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.689247Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:18.689301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:14:18.689403Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.689480Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.689856Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-01-10T12:14:18.690350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.690485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.690847Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.690932Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691217Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691366Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691422Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691680Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.691763Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for 
CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692288Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692535Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692611Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692811Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.692922Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:18.693178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:18.693415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:14:18.698833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:18.703759Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:18.703850Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:18.705043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:18.705109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:18.705162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:18.707766Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1024:2983] sender: [2:1079:2058] recipient: [2:15:2062] 2026-01-10T12:14:18.735369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 2026-01-10T12:14:18.776944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 0 2026-01-10T12:14:18.777131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:18.777183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-10T12:14:18.777268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-10T12:14:18.777319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D3563DD00, stats written 1 2026-01-10T12:14:18.777862Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:18.778067Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 269us result status StatusSuccess 2026-01-10T12:14:18.778633Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 168 LastUpdateTime: 168 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:04.907453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:04.907546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:04.907592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:04.907636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:04.907705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:04.907743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:04.907834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:04.907927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:04.909037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:04.909425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:05.010254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:05.010347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:05.024251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:05.024346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:05.024531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:05.033499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:05.033781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:05.034537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.034801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:05.037278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.037477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:05.038835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:05.038898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.038974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:05.039038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:05.039088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:05.039372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:05.205190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:14:05.206474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.206625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.206713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.206792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.206855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.206927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.207596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
nId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:17.394544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:17.394841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:17.394895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:17.395046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:14:17.395210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:17.395250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 107, path id: 38 2026-01-10T12:14:17.395300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 107, path id: 40 2026-01-10T12:14:17.395349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:17.395384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:14:17.395455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:17.395495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:14:17.395553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-10T12:14:17.396680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:14:17.396793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:14:17.396848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:14:17.396893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 14 2026-01-10T12:14:17.396929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:17.397978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:14:17.398037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:14:17.398079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:14:17.398103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-10T12:14:17.398144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-10T12:14:17.398207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-01-10T12:14:17.400729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:17.400782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:17.401110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:14:17.401265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:14:17.401313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:14:17.401360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:14:17.401395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:14:17.401430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-10T12:14:17.401482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:14:17.401527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:14:17.401560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-10T12:14:17.401650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:14:17.402181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:17.402217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:17.403112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:17.404064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:17.405993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:17.406077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-10T12:14:17.406785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2026-01-10T12:14:17.407412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-10T12:14:17.407480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-10T12:14:17.408058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-10T12:14:17.408193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:14:17.408264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1404:3330] TestWaitNotification: OK eventTxId 107 2026-01-10T12:14:17.408978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:17.409199Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusSuccess 2026-01-10T12:14:17.409673Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TA] {RESULT} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.919601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.919690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.919726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.919771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.919826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.919866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.919922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.920005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.920928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.921275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:12.012699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:12.012775Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:12.028986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:12.029071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:12.029230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:12.038995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:12.039247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:12.040005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:12.040259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:12.042343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:12.042473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:12.043362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:12.043423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:12.043474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:12.043511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:12.043546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:12.043735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:12.196636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.197722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.197869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.197979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:12.198857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
complete, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:17.030731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:14:17.030799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:17.031007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-10T12:14:17.031185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:17.031260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-10T12:14:17.031324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-10T12:14:17.031872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:17.031933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-10T12:14:17.032056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:17.032144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-10T12:14:17.032191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-10T12:14:17.033122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:17.033237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:17.033294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:17.033332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-01-10T12:14:17.033378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-10T12:14:17.034235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at 
schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:17.034328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:17.034359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:17.034407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-10T12:14:17.034442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-10T12:14:17.034514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:14:17.037896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:17.037962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-10T12:14:17.038352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-10T12:14:17.038558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:17.038604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:17.038646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:17.038685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:17.038725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:14:17.038789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1003:2944] message: TxId: 104 2026-01-10T12:14:17.038840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:17.038880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:14:17.038914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:14:17.039018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-10T12:14:17.039684Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:14:17.039725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:17.040399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:17.042118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:17.043056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:17.043115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2814], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-10T12:14:17.043204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:17.043250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1360:3278] 2026-01-10T12:14:17.044144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:17.045158Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-10T12:14:17.045361Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 228us result status StatusSuccess 2026-01-10T12:14:17.045971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } 
StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:09.463068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:09.463148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:09.463199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:09.463244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:09.463292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:09.463321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:09.463375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:09.463451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 
0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:09.464284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:09.464572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:09.534530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:09.534591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:09.551405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:09.551510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:09.551707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:09.570824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:09.571088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:09.571854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.572055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:09.574507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.574731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:09.576204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:09.576259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.576332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:09.576377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:09.576424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:09.576699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.584342Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:09.719305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { 
Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:09.719552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.719757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:09.719819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:09.720046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:09.720154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:09.722447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.722715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:09.722944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.723005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:09.723058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:09.723092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:09.725165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.725230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:09.725268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:09.728278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.728342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.728383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at 
tablet# 72057594046678944 2026-01-10T12:14:09.728446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:09.732805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:09.735845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:09.736032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:09.737479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.737621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:09.737681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.738001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:09.738066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.738251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:09.738338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:09.742572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:09.742619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ts { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1541 } } CommitVersion { Step: 130 TxId: 102 } 2026-01-10T12:14:10.223539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1541 } } CommitVersion { Step: 130 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-10T12:14:10.224356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 504 RawX2: 4294969753 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:14:10.224413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2026-01-10T12:14:10.224571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 504 RawX2: 4294969753 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:14:10.224635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:14:10.224732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 504 RawX2: 4294969753 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:14:10.224813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.224855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.224898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:14:10.224979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:14:10.230723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:10.230900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:10.231102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.231229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.231623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.231697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:14:10.231828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:14:10.231871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:10.231930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:14:10.231968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:10.232014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:14:10.232145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:313:2303] message: TxId: 102 2026-01-10T12:14:10.232227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:10.232281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:14:10.232318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:14:10.232482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-10T12:14:10.237619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:10.237707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:532:2477] TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:10.238340Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:10.238604Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 278us result status StatusSuccess 2026-01-10T12:14:10.239248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } 
Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:10.240116Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:10.240420Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 311us result status StatusSuccess 2026-01-10T12:14:10.241013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2026-01-10T12:12:37.717428Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1768047157717395 2026-01-10T12:12:38.221640Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704723975172250:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:38.221691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:38.285720Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015414s 2026-01-10T12:12:38.416316Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.426244Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704722290959042:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:38.426312Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:38.465405Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.904209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.912651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-10T12:12:38.988730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.988871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:39.023026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:39.023106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:39.092942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:39.111705Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:39.115292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:39.156736Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:39.163720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:39.166501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:39.304274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:39.453932Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:39.534379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0029f5/r3tmp/yandexVn1muf.tmp 2026-01-10T12:12:39.534419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0029f5/r3tmp/yandexVn1muf.tmp 2026-01-10T12:12:39.534619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0029f5/r3tmp/yandexVn1muf.tmp 2026-01-10T12:12:39.534730Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:39.601568Z INFO: TTestServer started on Port 17982 GrpcPort 9396 PQClient connected to localhost:9396 2026-01-10T12:12:39.842514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:43.228226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704723975172250:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.228336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:43.425148Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704722290959042:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.425237Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:43.609956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745450010076:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.610090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.610302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745450010088:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.610363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745450010090:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.610719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.614952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:43.629906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745450010126:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.630029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.671196Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704745450010092:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:12:43.801723Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704745450010178:3006] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:44.071488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:44.072723Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593704745450010188:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:44.075637Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDAxMTY2NWUtZmU5M2RmZTQtYmRmNzgwMmQtNWU4Y2ZjMTc=, ActorId: [1:7593704745450010073:2331], ActorState: ExecuteState, LegacyTraceId: 01kekx5e6cfp0mj1maxv3nn6n5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:44.078106Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:12:44.089922Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593704743765795762:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table ... 2026-01-10T12:14:06.356149Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.362012Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:14:06.385045Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.385087Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.385108Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.385135Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.385155Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.399784Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-10T12:14:06.410398Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2026-01-10T12:14:06.412368Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:14:06.431749Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1768047246] updating configuration. Deleted partitions []. 
Added partitions [] 2026-01-10T12:14:06.432351Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:14:06.436135Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:14:06.456261Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.456298Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.456316Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.456340Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.456359Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.459757Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1768047246497, TxId 281474976710676 2026-01-10T12:14:06.459821Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.459844Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-10T12:14:06.459881Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:14:06.459895Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.459915Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:14:06.459928Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.460266Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1768047246497, TxId 281474976710676 2026-01-10T12:14:06.460303Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.460322Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:14:06.460339Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.460376Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:14:06.460553Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976710676 2026-01-10T12:14:06.460583Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:14:06.460599Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: 
[72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:14:06.460620Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.460832Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:14:06.463954Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:14:06.464280Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:14:06.464387Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:14:06.464421Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.464440Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.464455Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.464477Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.464494Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.464525Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:06.464950Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1768047246" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1768047246" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1768047246" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-10T12:14:06.464999Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:06.465153Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:14:06.468017Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:14:06.485459Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.485516Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.485534Z node 10 :PERSQUEUE DEBUG: 
partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.485557Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.485574Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.487553Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-10T12:14:06.557354Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.557396Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.557416Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.557442Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.557463Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.585730Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:06.585759Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.585772Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:06.585793Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:06.585822Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:06.704637Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [9:7593705100064179493:2478] TxId: 281474976710677. Ctx: { TraceId: 01kekx7yts8m37k3kkpf3a6abc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ODY3MTY0NTMtMjI4MTFhN2YtYmM1MzIyYjQtNWRhZGM2Y2I=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-01-10T12:14:06.704796Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [9:7593705100064179497:2478], TxId: 281474976710677, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx7yts8m37k3kkpf3a6abc. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=ODY3MTY0NTMtMjI4MTFhN2YtYmM1MzIyYjQtNWRhZGM2Y2I=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [9:7593705100064179493:2478], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:08.954291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:08.954411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:08.954470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:08.954523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:08.954598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:08.954661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:08.954734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:08.954851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:08.955898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:08.956335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:09.082061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:09.082143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:09.099761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:09.099907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:09.100165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:09.117629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:09.117920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:09.118800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-10T12:14:09.119227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:09.126763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.126993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:09.128322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:09.128395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:09.128464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:09.128530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:09.128584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:09.128855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.136148Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:09.308012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:09.308331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.308571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:09.308638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:09.308942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:09.309038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:09.312136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.312355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:09.312569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.312625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:09.312679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:09.312726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:09.314788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.314854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:09.314890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:09.316940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.317002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:09.317054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.317122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:09.332436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:09.336204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:09.336461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:09.337870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:09.338040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:09.338107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.338451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:09.338526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:09.338753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:09.338855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:09.341777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:09.341849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:09.537446Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:09.537756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:14:09.537891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:14:09.538212Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2026-01-10T12:14:09.539337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:09.539516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2026-01-10T12:14:09.540214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 
72057594046678944 2026-01-10T12:14:09.540402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2026-01-10T12:14:09.541577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:09.541655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:09.541834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:09.542745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:09.542800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:09.542873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:09.546188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:14:09.546255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:14:09.546495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-10T12:14:09.547315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:09.547356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:09.549370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:09.549403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:09.549465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:14:09.549485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:14:09.549570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-10T12:14:09.549635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:09.549660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:09.550058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: 
Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:09.550119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:09.550568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:14:09.551814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:09.551959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-10T12:14:09.552315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:09.552373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-10T12:14:09.552471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:09.552524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:14:09.553095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:09.553226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:09.553270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:591:2506] 2026-01-10T12:14:09.553519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:09.553626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:09.553655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:591:2506] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:09.554147Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:09.554383Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 267us result status StatusPathDoesNotExist 2026-01-10T12:14:09.554623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: 
"/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:09.555115Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:09.555359Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 208us result status StatusSuccess 2026-01-10T12:14:09.555914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-10T12:14:10.468536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:10.468662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.468708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:10.468753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:10.468809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:10.468872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:10.468943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.469028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:10.469919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:10.470228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:10.569554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:10.569624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:10.586744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:10.586876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:10.587084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:10.595405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:10.595670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:10.596546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.596804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:10.599413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:10.599619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-10T12:14:10.600965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.601028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:10.601113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:10.601163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:10.601213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:10.601470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.612415Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:10.745648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:10.745884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.746088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:10.746141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:10.746373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:10.746440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:10.748928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.749161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:10.749416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-10T12:14:10.749475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:10.749552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:10.749600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:10.751865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.751928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:10.751974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:10.754555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.754616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.754666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.754725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:10.759281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:10.762044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:10.762290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:10.763573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:10.763740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:10.763803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.764236Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:10.764308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:10.764516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:10.764606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:10.769760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.769830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... nge state for txid 101:0 128 -> 240 2026-01-10T12:14:10.950974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-10T12:14:10.951149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:10.951213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-10T12:14:10.951268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:14:10.954487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:10.954534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:10.954705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:14:10.954818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:10.954883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:14:10.954949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-01-10T12:14:10.955147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:10.955192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:14:10.955295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:10.955330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:10.955404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:14:10.955443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:10.955481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:14:10.955527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:14:10.955596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:14:10.955643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:14:10.955927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2026-01-10T12:14:10.955977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-10T12:14:10.956014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-10T12:14:10.956054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-10T12:14:10.957449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:10.957563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:10.957607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:10.957656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-10T12:14:10.957699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:10.958956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:10.959079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:14:10.959114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:14:10.959144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-10T12:14:10.959182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-10T12:14:10.959254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:14:10.962332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:14:10.963605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-10T12:14:10.963974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:10.964039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:14:10.964177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:14:10.964219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:14:10.964701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:10.964843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:14:10.964894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:10.964934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:595:2512] 2026-01-10T12:14:10.965125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:10.965152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:595:2512] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:10.965576Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:10.965774Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 234us result status StatusSuccess 2026-01-10T12:14:10.966381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:01.409302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:01.409410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.409470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:01.409516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:01.409554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:01.409586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:01.409638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:01.409738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:01.410645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:01.410963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:01.504329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:01.504390Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:01.529593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:01.536579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:01.536797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:01.566165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:01.566365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:01.567106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:01.567567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:01.577917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.578126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:01.579446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:01.579517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:01.579682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:01.579755Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:01.579813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:01.580098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:01.743624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.747970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:01.748433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... -10T12:14:15.956488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:14:15.956543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:15.959505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-10T12:14:15.959763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:15.959864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-10T12:14:15.959921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-10T12:14:15.960658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:15.960726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-10T12:14:15.960815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:15.960859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-10T12:14:15.960902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-10T12:14:15.961685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:15.961807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:15.961838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:15.961870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 14 2026-01-10T12:14:15.961902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-10T12:14:15.962794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:15.962865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-10T12:14:15.962893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-10T12:14:15.962914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-10T12:14:15.962935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-10T12:14:15.962981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:14:15.966026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-10T12:14:15.966091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-10T12:14:15.966473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-10T12:14:15.966678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:15.966718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:15.966754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:14:15.966786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:15.966820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:14:15.966880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1001:2942] message: TxId: 104 2026-01-10T12:14:15.966925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:14:15.966960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation 
id: 104:0 2026-01-10T12:14:15.966991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:14:15.967090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-10T12:14:15.967536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-10T12:14:15.967577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-10T12:14:15.968427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:15.969546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-10T12:14:15.970851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-10T12:14:15.970906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-10T12:14:15.971324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:14:15.971369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2032:3942] 2026-01-10T12:14:15.971866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-10T12:14:15.975579Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-10T12:14:15.975805Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 257us result status StatusSuccess 2026-01-10T12:14:15.976400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |88.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D4DF50AF900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:05.292605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:05.292702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.292764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:05.292815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:05.292856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:05.292897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:05.292960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.293045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:05.293915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:05.294208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:05.396657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:05.396725Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:05.430756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:05.430873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:05.431155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:05.446810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:05.447068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:05.447847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.448134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:05.451217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.451401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:05.452656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:05.452717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.452818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:05.452878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:05.452923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-10T12:14:05.453271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:05.609577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.610603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.610721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.610794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.610893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.610962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.611658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... eshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D4DF5008100, stats written 0 2026-01-10T12:14:16.068652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 0 2026-01-10T12:14:16.068860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:16.068955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-10T12:14:16.069011Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-10T12:14:16.069066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D4DF5008100, stats written 1 2026-01-10T12:14:16.069569Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:16.069846Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 310us result status StatusSuccess 2026-01-10T12:14:16.070389Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 151 LastUpdateTime: 151 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 162304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 
} DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:05.553331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:05.553450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.553511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:05.553560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:05.553615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:05.553646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:05.553721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:05.553823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:05.554814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:05.555213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:05.653048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:05.653107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:05.673351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:05.673497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:05.673743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:05.686086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:05.686356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:05.687140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:05.687353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:05.691240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.691433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:05.692939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:05.693012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:05.693088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:05.693134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:05.693183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:05.693417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:05.891768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:14:05.892892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.893908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:05.894003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
196544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:14:08.196606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:14:08.196654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:14:08.196690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:14:08.197720Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:08.197817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:08.197852Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:14:08.197897Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:14:08.198345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:08.206001Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:08.206139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:08.206185Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:14:08.206226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:14:08.206278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:08.206402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:14:08.209164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at 
schemeshard 72057594046678944 2026-01-10T12:14:08.209310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:08.209365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:08.209409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:14:08.210482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:08.211570Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-10T12:14:08.212439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:08.212780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:14:08.213939Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:14:08.214139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:08.214374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:14:08.215413Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:14:08.215612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:14:08.215801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409548 2026-01-10T12:14:08.216671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:08.216717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: 
PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:08.216837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 Forgetting tablet 72075186233409547 2026-01-10T12:14:08.218105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:08.218152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:14:08.218217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:08.219085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:08.222893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:08.222962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:08.232513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:08.232570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:08.232689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:08.232749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:08.232822Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:08.233034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:14:08.233371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:14:08.233433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:14:08.233906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:14:08.234032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:08.234081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:924:2879] TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:08.234670Z 
node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:08.234870Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusPathDoesNotExist 2026-01-10T12:14:08.235062Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2026-01-10T12:13:43.505587Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705000888744998:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:43.505663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:43.794341Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:43.806793Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:44.216827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.216891Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:44.216212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.535370Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.538742Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:44.598700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:44.598809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:44.602160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:44.602263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:44.636529Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:44.636689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:44.681845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:44.842192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:44.842311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:44.841394Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:44.844948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:44.972330Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:45.004665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003642/r3tmp/yandexH5s7Yh.tmp 2026-01-10T12:13:45.004693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003642/r3tmp/yandexH5s7Yh.tmp 2026-01-10T12:13:45.004927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003642/r3tmp/yandexH5s7Yh.tmp 2026-01-10T12:13:45.005115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:45.044868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:45.107933Z INFO: TTestServer started on Port 5939 GrpcPort 25823 PQClient connected to localhost:25823 2026-01-10T12:13:45.451898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:45.686125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:48.508218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705000888744998:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:48.508293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:49.294380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705026658550191:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.294446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705026658550204:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.294587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.295159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705026658550207:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.295263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.300250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-01-10T12:13:49.300544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705026658550221:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.300670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.302465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705026658550240:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.302554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:13:49.344317Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705026658550206:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:13:49.656585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705026658550284:3061] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:13:49.737524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:49.928948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:50.002974Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705026658550301:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:50.003649Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MmZkZTBlMzItOTA4ODg0ZTAtMzk4NmE4MjQtYmZhODUyYTM=, ActorId: [1:7593705026658550189:2332], ActorState: ExecuteState, LegacyTraceId: 01kekx7ea73ggr1sb8fk4rvce2, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table pa ... : partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.301139Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301149Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.301165Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:19.301194Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301207Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.301227Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.301493Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:19.301514Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301522Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.301546Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301555Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-10T12:14:19.301579Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:19.301589Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.301598Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.301610Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-10T12:14:19.301618Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-10T12:14:19.302014Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:19.302032Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302041Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.302054Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302061Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:19.302060Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:19.302078Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302085Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:19.302088Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.302095Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302104Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302104Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.302114Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.302115Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.302127Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:19.400957Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401004Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401018Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.401040Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401055Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-10T12:14:19.401112Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401125Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-10T12:14:19.401134Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.401147Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401157Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-10T12:14:19.401415Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401435Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401472Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.401487Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401498Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.401528Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401541Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401551Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.401567Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401578Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.401892Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401915Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401926Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.401938Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401946Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-10T12:14:19.401970Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:19.401982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.401992Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.402006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402017Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 
2026-01-10T12:14:19.402384Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:19.402406Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402407Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:19.402418Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.402423Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402434Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402435Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.402443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:19.402449Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402460Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:19.402472Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:19.402484Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402494Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:19.402505Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:19.402515Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink |88.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd |88.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D30367A1D00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:11.548950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:11.549048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.549100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:11.549166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:11.549205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:11.549259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:11.549319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:11.549410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:11.550174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:11.550461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.648054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.648153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.665422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.665561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.665775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.674790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.675053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.675832Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.676048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.678204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.678339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.679279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.679326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.679393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.679427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.679458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.679714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.822216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:14:11.823509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.823937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.824044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.824154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.824216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.824281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.170333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-01-10T12:14:22.170837Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.170951Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171260Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171317Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171533Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171764Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.171921Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172025Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172422Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172651Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172715Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172762Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172879Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172920Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.172969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.173183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:22.173386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:14:22.177832Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:22.181597Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:22.181666Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:22.182647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:22.182706Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:22.182766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:22.183683Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 Leader for TabletID 72057594046678944 is [2:875:2834] sender: [2:930:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 2026-01-10T12:14:22.243302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 0 2026-01-10T12:14:22.243466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:22.243522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-10T12:14:22.243596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 
2026-01-10T12:14:22.243642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D3036834100, stats written 1 2026-01-10T12:14:22.244060Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:22.244349Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 308us result status StatusSuccess 2026-01-10T12:14:22.245166Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 166 LastUpdateTime: 166 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: 
"Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TSchemeShardSubDomainTest::LS [GOOD] |88.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |88.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.066480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.066575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:14:23.066635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.066690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.066745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.066786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.066850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.066945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.067834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.068198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.162703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.162770Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.176648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.182107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.182347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.190064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.190297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.191129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.191570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.195199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.195422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.196801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.196872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.196999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-10T12:14:23.197057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.197122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.197388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.204754Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:23.418440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:23.418695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.418906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:23.418966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:23.419219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:23.419306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:23.422299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.422525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:23.422774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.422846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:23.422901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:23.422939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:23.425364Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.425439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:23.425485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:23.427599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.427653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.427702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.427768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:23.432006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:23.437001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:23.437220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:23.438357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.438502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:23.438561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.439134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:23.439209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.439391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:23.439472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:23.441868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.441917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... meshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:14:23.553411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.553445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:23.553502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-10T12:14:23.553575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.553621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:23.553729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:23.553796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:23.553851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:23.553888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:23.553932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:14:23.553975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:23.554018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:23.554062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:23.554384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-10T12:14:23.554443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-10T12:14:23.554480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-10T12:14:23.554516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 
2026-01-10T12:14:23.555654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:23.555767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:23.555805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:23.555849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-10T12:14:23.555920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:23.556516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:23.556607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:23.556643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:23.556673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-10T12:14:23.556714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-10T12:14:23.556781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-10T12:14:23.560917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:23.561362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-10T12:14:23.561629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:23.561695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-10T12:14:23.562106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:23.562211Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:23.562247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:454:2410] TestWaitNotification: OK eventTxId 100 2026-01-10T12:14:23.562714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:23.562976Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 291us result status StatusSuccess 2026-01-10T12:14:23.563500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:23.564265Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:23.564517Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 439us result status StatusSuccess 2026-01-10T12:14:23.564983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:22.743419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:22.743484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:22.743520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:22.743556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:22.743583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:22.743601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:22.743649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:22.743711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:22.744320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:22.744544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:22.818743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:22.818817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:22.837333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:22.837432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:22.837634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:22.847101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-10T12:14:22.847386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:22.848124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:22.848437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:22.850770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:22.850958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:22.852313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:22.852390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:22.852465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:22.852522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:22.852569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:22.852827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:22.859690Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:23.011564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:23.011769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.011923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:23.011960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:23.012433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:23.012524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:23.014818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.015027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:23.015264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.015344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:23.015398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:23.015447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:23.018250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.018313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:23.018353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:23.021100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.021171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.021222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.021290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:23.024954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:23.035145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:23.035386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2026-01-10T12:14:23.036741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.036904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:23.036971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.037292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:23.037377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.037576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:23.037684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:23.041836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.041942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2026-01-10T12:14:24.111503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:24.111620Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:14:24.117312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:14:24.117839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2026-01-10T12:14:24.118462Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:274:2263] Bootstrap 2026-01-10T12:14:24.119660Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:274:2263] Become StateWork (SchemeCache [2:279:2268]) 2026-01-10T12:14:24.122943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:24.123239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.123350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2026-01-10T12:14:24.124799Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:274:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:14:24.134688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:24.134969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2026-01-10T12:14:24.135362Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2026-01-10T12:14:24.138971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:24.139364Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.139487Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2026-01-10T12:14:24.144504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:24.144825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-10T12:14:24.145216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:24.145268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-10T12:14:24.145372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:24.145399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:14:24.145876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:24.145998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:24.146352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.146400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:293:2282] 2026-01-10T12:14:24.146573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.146600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:293:2282] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-10T12:14:24.147082Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:24.147284Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 256us result status StatusPathDoesNotExist 2026-01-10T12:14:24.147485Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:24.148029Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:24.148239Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 221us result status StatusPathDoesNotExist 2026-01-10T12:14:24.148380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:24.148935Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:24.149104Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2026-01-10T12:14:24.149544Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas |88.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.348873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.348996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.349044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.349092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.349139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.349185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.349243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.349338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.350319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.350668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.549943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.550010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.578573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.587552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.587808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.600869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.601109Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.602018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.602519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.606965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.607222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.608762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.608849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.609018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.609081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.609133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.609414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.816221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.820737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.820941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821734Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.821990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
oordinatorID: 72057594046316545 2026-01-10T12:14:24.685666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-10T12:14:24.685840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-10T12:14:24.686278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:24.686413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:24.686489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-10T12:14:24.686820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-10T12:14:24.686909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-10T12:14:24.687111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:24.687189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:24.687249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-01-10T12:14:24.689852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:24.689915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:24.690172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:24.690348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:24.690408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-10T12:14:24.690472Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-10T12:14:24.690785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.690843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-10T12:14:24.690956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:24.691009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:24.691075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-10T12:14:24.691129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:24.691193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-10T12:14:24.691306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-10T12:14:24.691360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-10T12:14:24.691416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-10T12:14:24.691492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:14:24.691540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-01-10T12:14:24.691580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-10T12:14:24.691617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:14:24.693204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:24.693303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:24.693348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:24.693393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:14:24.693461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:14:24.694281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:24.694383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-10T12:14:24.694418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-10T12:14:24.694460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:14:24.694496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:14:24.694587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-01-10T12:14:24.694670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:685:2675] 2026-01-10T12:14:24.698242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:24.698414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-10T12:14:24.698490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.698525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:686:2676] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2026-01-10T12:14:24.699175Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:24.699421Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 255us result status StatusSuccess 2026-01-10T12:14:24.699995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.9%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.171940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.172039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.172096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.172139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.172183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.172209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.172255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.172338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.173166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.173477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.266536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.266605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.280038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.284976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.285210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.292426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.292650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.293347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.293750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.297121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.297298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.298531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.298594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.298716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.298760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.298802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.299037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.309087Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:23.448365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:23.448650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.448850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:23.448908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:23.449154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:23.449243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:23.456927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.457154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:23.457370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.457442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:23.457492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:23.457525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:23.464996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.465084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:23.465138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:23.470527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.470589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.470636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.470702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:23.474638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:23.477003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:23.477240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:23.478511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.478651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:23.478730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.479019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:23.479103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.479266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:23.479345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:23.489378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.489446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
shard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 614 RawX2: 4294969849 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-10T12:14:24.234172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2026-01-10T12:14:24.234277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 614 RawX2: 4294969849 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-10T12:14:24.234315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:14:24.234376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 614 RawX2: 4294969849 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-10T12:14:24.234418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:24.234443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-10T12:14:24.234488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:14:24.234518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:2 129 -> 240 2026-01-10T12:14:24.237621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:24.241207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:24.241339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:24.241389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:14:24.241472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.241607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-10T12:14:24.241678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.242097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-10T12:14:24.242251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.242292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-10T12:14:24.242400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2026-01-10T12:14:24.242439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-10T12:14:24.242483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2026-01-10T12:14:24.242516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-10T12:14:24.242564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2026-01-10T12:14:24.242954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-10T12:14:24.242989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:2 ProgressState 2026-01-10T12:14:24.243043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2026-01-10T12:14:24.243064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-10T12:14:24.243095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2026-01-10T12:14:24.243117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-10T12:14:24.243153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2026-01-10T12:14:24.243221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2436] message: TxId: 107 2026-01-10T12:14:24.243266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-10T12:14:24.243308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:14:24.243338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-10T12:14:24.243465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-10T12:14:24.243508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-10T12:14:24.243526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-10T12:14:24.243554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-10T12:14:24.243573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-10T12:14:24.243590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-10T12:14:24.243633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-10T12:14:24.247043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.247092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:541:2490] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-10T12:14:24.251455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:24.252036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2026-01-10T12:14:24.252166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-10T12:14:24.252210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-10T12:14:24.254451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:24.254702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: 
StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-10T12:14:24.255176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-10T12:14:24.255224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-10T12:14:24.255715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-10T12:14:24.255802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.255851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:732:2652] TestWaitNotification: OK eventTxId 108 |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:22.909538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:22.909632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:22.909690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:22.909743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:22.909777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:22.909805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-10T12:14:22.909864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:22.909955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:22.910763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:22.911080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.006792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.006863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.043507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.043612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.043809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.067723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.067974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.068744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.069000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.071346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.071529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.073013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.073076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.073145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.073194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.073237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.073497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.080857Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:23.258132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:23.258413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.258635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:23.258702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:23.258978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:23.259048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:23.261644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.261879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:23.262145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.262234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:23.262283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:23.262320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:23.264755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.264833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:23.264876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:23.267043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2026-01-10T12:14:23.267101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.267150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.267224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:23.271252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:23.273692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:23.273965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:23.275252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.275399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:23.275468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.275826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:23.275908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:23.276113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:23.276198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:23.278484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.278533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:24.856181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:14:24.856258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 3 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:14:24.856448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.856501Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:14:24.856634Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:14:24.856682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:24.856738Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:14:24.856781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:24.856828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:14:24.856892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:14:24.856965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:14:24.857009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:14:24.857297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2026-01-10T12:14:24.857358Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:14:24.857404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:14:24.857438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2026-01-10T12:14:24.858399Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:24.858475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:24.858521Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:14:24.858556Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:14:24.858598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:14:24.859449Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:24.859512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:14:24.859534Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:14:24.859557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-10T12:14:24.859582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2026-01-10T12:14:24.859636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:14:24.864428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:14:24.866210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:14:24.866564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:14:24.866613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:14:24.867150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:14:24.867275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:24.867324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:972:2806] TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:24.867860Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-10T12:14:24.868045Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusSuccess 2026-01-10T12:14:24.868561Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:24.869376Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:24.869571Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 214us result status StatusSuccess 2026-01-10T12:14:24.870026Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 
CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |89.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |89.0%| [LD] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectAll |89.0%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D13271A5900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.067049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.067149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.067192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.067238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.067272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.067300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.067368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.067488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.068365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.068696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.156576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.156635Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.173756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.173871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.174080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.204642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.204946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.205723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.205995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.208702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.208912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.210289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.210363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.210452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.210514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.210577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.210892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.424794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.425952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:14:23.426293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.426962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.427041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.427114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:14:26.570346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:14:26.570370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:14:26.570397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:14:26.570419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:14:26.570443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:14:26.570602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.570692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.570819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.570875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-10T12:14:26.570987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.571072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for 
TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.571580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-01-10T12:14:26.572127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.572268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.572675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.572755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.573642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.574738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.575043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:14:26.580484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:26.586294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-10T12:14:26.586377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.586694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:26.586754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:26.586799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:26.586933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:968:2928] sender: [1:1021:2058] recipient: [1:15:2062] 2026-01-10T12:14:26.619470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 2026-01-10T12:14:26.676441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 0 2026-01-10T12:14:26.676730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:26.676819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-10T12:14:26.676876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-10T12:14:26.676926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D13270E0100, stats written 1 2026-01-10T12:14:26.677529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:26.677803Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 292us result status StatusSuccess 2026-01-10T12:14:26.678390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 150 LastUpdateTime: 150 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:26.464946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:26.465034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.465076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:26.465138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:26.465193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:26.465224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:26.465302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.465381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:26.466173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:26.466488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:26.561904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:26.561985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:26.582442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:26.582573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:26.582771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:26.600394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:26.600697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:26.601506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.601760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:26.604447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.604646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:26.605971Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:26.606043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.606118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:26.606163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:26.606231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:26.606475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.614289Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:26.756296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:26.756594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.756804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:26.756860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:26.757124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:26.757184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:26.759525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.759728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:26.759978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.760041Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:26.760103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:26.760139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:26.762196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.762255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:26.762289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:26.764117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.764169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.764210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:26.764269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:26.767816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:26.772632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:26.772900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:26.774183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.774318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:26.774375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:26.774663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 
2026-01-10T12:14:26.774721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:26.774893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:26.774991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:26.780271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:26.780323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 2026-01-10T12:14:26.943998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-10T12:14:26.947029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:14:26.947079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-10T12:14:26.947175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-10T12:14:26.947244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-10T12:14:26.947830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:14:26.947858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:14:26.947932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:26.948043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:26.948060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:14:26.948378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:26.948427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:14:26.948517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:26.949741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:14:26.949780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:14:26.949867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:14:26.949887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:14:26.949952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-10T12:14:26.950047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:14:26.950093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:14:26.950280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:26.950415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:14:26.953123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-01-10T12:14:26.953302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-10T12:14:26.953330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-10T12:14:26.953382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:14:26.953405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:14:26.953442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:14:26.953477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:14:26.953975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-10T12:14:26.954116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-10T12:14:26.954151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:605:2519] 2026-01-10T12:14:26.954276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:14:26.954365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:14:26.954433Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:14:26.954455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:605:2519] 2026-01-10T12:14:26.954548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:14:26.954569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:605:2519] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-10T12:14:26.955019Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:26.955188Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusPathDoesNotExist 2026-01-10T12:14:26.955357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:26.955925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:26.956118Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 196us result status StatusPathDoesNotExist 2026-01-10T12:14:26.956264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:14:26.956767Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:26.957014Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 222us result status StatusSuccess 2026-01-10T12:14:26.957501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D5B562A3100 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:10.981393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:10.981473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.981512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:10.981554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:10.981593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:10.981629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:10.981707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:10.981788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:10.982635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:10.982930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:11.078279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:11.078340Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:11.098072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:11.098208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:11.098459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:11.108628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:11.108907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:11.109611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:11.109788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:11.111892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.112167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:11.113282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:11.113331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:11.113388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:11.113428Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:11.113477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:11.113644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:11.275685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.276938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:11.277818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... _SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.0845 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 0 2026-01-10T12:14:26.753571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:26.753634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:26.753835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 2026-01-10T12:14:26.772302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 2026-01-10T12:14:26.797153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007D5B56309900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 2026-01-10T12:14:26.797328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 1 2026-01-10T12:14:26.797393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:26.797444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:26.797606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 2 2026-01-10T12:14:26.797901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-10T12:14:26.797939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:26.798032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D5B56309900, stats written 3 2026-01-10T12:14:26.798716Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:26.813205Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 225us result status StatusSuccess 2026-01-10T12:14:26.813836Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInTargetTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::SplittedTargetTopic [GOOD] >> TPartitionTests::Batching >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous >> TPartitionTests::Batching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007DFFC12A1D00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.151331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.151430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.151468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.151510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.151545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.151571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.151636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.151741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.152597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.152923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.254527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.254594Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.270528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.270637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.270855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.279466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.279733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.280510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.280728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.282922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.283083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.284261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.284318Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.284399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.284450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.284490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.284773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.431859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.432813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.432936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } 
} TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.433708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 Leader for TabletID 72057594046678944 is [1:905:2854] sender: [1:981:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 2026-01-10T12:14:28.070076Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 2026-01-10T12:14:28.070268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 0 2026-01-10T12:14:28.070380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-10T12:14:28.070443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:28.070718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 1 2026-01-10T12:14:28.071137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-10T12:14:28.071203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-10T12:14:28.071316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DFFC13D2D00, stats written 2 2026-01-10T12:14:28.071984Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:28.082752Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 318us result status StatusSuccess 2026-01-10T12:14:28.083383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TPartitionTests::ChangeConfig >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.997641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.997749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.997790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.997839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.997901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.997936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.998011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.998111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.999029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.999368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:24.196260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:24.196348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:24.217489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:24.217597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:24.217824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:24.239957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:24.240233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:24.244495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:24.244798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:24.250818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:24.251030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:24.252362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:24.252447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:24.252518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:24.252571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make 
a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:24.252622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:24.252942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:24.457086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" 
Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.458976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.459061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.459131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.459224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.459326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:24.459409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:14:24.459493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 12:14:28.314146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:28.314538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:28.314595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:28.314814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:14:28.315024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:28.315075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:14:28.315127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:14:28.315208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:28.315269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:14:28.315379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:28.315426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:14:28.315477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-10T12:14:28.317430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:28.317562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:28.317603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:28.317654Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-10T12:14:28.317704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:28.319813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:28.319923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:28.319955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:28.319992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:14:28.320045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:14:28.320155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:14:28.322772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:28.322864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:28.323322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:14:28.323560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:28.323604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:28.323643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:28.323680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:28.323722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:14:28.323798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:824:2791] message: TxId: 103 2026-01-10T12:14:28.323850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:28.323896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:14:28.323933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:14:28.324042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:14:28.324730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:28.324789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:28.325905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:28.328826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:28.330116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:28.330176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-10T12:14:28.330300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:28.330345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1142:3076] 2026-01-10T12:14:28.331203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:28.332950Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:28.333216Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 294us result status StatusSuccess 2026-01-10T12:14:28.333726Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TPartitionTests::ChangeConfig [GOOD] |89.0%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:26.250458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:26.250579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.250625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:26.250666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:26.250707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:26.250731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:26.250779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.250882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:26.251728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:26.252048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:26.340982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:26.341045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:26.355192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:26.359915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:26.360129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:26.366921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-10T12:14:26.367114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:26.367801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.368200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:26.371445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.371631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:26.372837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:26.372897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.373015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:26.373063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:26.373106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:26.373314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.528511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.529603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.529722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.529787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.529871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.529947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:14:26.530415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.530644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
athId: 38] was 11 2026-01-10T12:14:29.603026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 3 2026-01-10T12:14:29.605110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 46, at schemeshard: 72057594046678944 2026-01-10T12:14:29.605353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2026-01-10T12:14:29.605656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:29.605719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:29.605968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 46] 2026-01-10T12:14:29.606064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:29.606402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1437:3300], at schemeshard: 72057594046678944, txId: 137, path id: 38 2026-01-10T12:14:29.606464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1437:3300], at schemeshard: 72057594046678944, txId: 137, path id: 46 2026-01-10T12:14:29.606546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.606597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2026-01-10T12:14:29.606823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-10T12:14:29.608001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2026-01-10T12:14:29.608140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2026-01-10T12:14:29.608197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: 
Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-10T12:14:29.608238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18 2026-01-10T12:14:29.608284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 12 2026-01-10T12:14:29.609497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-10T12:14:29.609576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-10T12:14:29.609605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-10T12:14:29.609633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 46], version: 1 2026-01-10T12:14:29.609660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 4 2026-01-10T12:14:29.609725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2026-01-10T12:14:29.612407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2026-01-10T12:14:29.612555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2026-01-10T12:14:29.612598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-10T12:14:29.613026Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-10T12:14:29.613224Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2026-01-10T12:14:29.613366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-10T12:14:29.613411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-10T12:14:29.613551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-10T12:14:29.613597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-10T12:14:29.613662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-10T12:14:29.613747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 137:0 2 -> 3 2026-01-10T12:14:29.614089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-10T12:14:29.616352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-10T12:14:29.617832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.618030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.618088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2026-01-10T12:14:29.618174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2026-01-10T12:14:29.618548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1382 RawX2: 4294970552 } TxBody: "\n\236\004\n\007Table11\020.\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 
\020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020.:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 38 2026-01-10T12:14:29.622990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2026-01-10T12:14:29.623165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> TPartitionTests::BlobKeyFilfer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:26.519105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:26.519203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.519262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:26.519323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:26.519372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:26.519435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:26.519508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.519630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:26.520604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:26.520968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:26.611373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:26.611438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:26.627235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:26.627352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:26.627541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:26.636842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:26.637166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:26.637986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.638257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:26.640833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.641027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:26.642385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:26.642457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.642567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:26.642622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:26.642681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:26.642962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.810116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.814964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.815010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
{ TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-10T12:14:29.993497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 240 -> 240 2026-01-10T12:14:29.997248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.997324Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-10T12:14:29.997452Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:14:29.997492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:14:29.997538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:14:29.997583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:14:29.997624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-10T12:14:29.997712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:649:2571] message: TxId: 106 2026-01-10T12:14:29.997768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:14:29.997814Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-10T12:14:29.997850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-10T12:14:29.998001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-10T12:14:29.998044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-10T12:14:30.001154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:14:30.001228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:828:2725] TestWaitNotification: OK eventTxId 106 2026-01-10T12:14:30.001918Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:30.002166Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 278us result status StatusSuccess 2026-01-10T12:14:30.002601Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: 
true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:30.003251Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:30.003459Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 222us result status StatusSuccess 2026-01-10T12:14:30.003850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:30.008721Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:30.008949Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 259us result status StatusSuccess 2026-01-10T12:14:30.009409Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::BlobKeyFilfer [GOOD] Test command err: 2026-01-10T12:14:28.945078Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.083150Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:29.083252Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:29.083318Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:29.083407Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:14:29.101960Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-10T12:14:29.102934Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:14:29.129185Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for 
SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:14:29.174578Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.197283Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-1 actual is session-id-2 2026-01-10T12:14:29.197349Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-3 actual is session-id-2 2026-01-10T12:14:29.197556Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:14:29.208420Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.764691Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.876478Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:29.876574Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:29.876621Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:29.876669Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:14:29.892337Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:181:2194] 2026-01-10T12:14:29.895055Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:14:29.000000Z 2026-01-10T12:14:29.895300Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:181:2194] 2026-01-10T12:14:29.918920Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.964790Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.985727Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:29.996271Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:30.037701Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:30.079426Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:30.100212Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send change config Wait cmd write (initial) Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\251\321\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:14:30.242725Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:30.263516Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\251\321\276\27238\001" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-2" IncludeFrom: true To: "c0000000003Sclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: 
"c0000000003wclient-2|0000000000000000" IncludeFrom: true To: "c0000000003wclient-2|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-1@\000\252\002\014\n\010client-3@\007" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\251\321\276\27238\001" StorageChannel: INLINE } Wait config changed 2026-01-10T12:14:30.326707Z node 2 :PERSQUEUE WARN: partition.cpp:2983: [72057594037927937][Partition][3][StateIdle] Partition 3 Unknown consumer 'client-2' 2026-01-10T12:14:31.042386Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:31.123934Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:31.123997Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:31.124040Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:31.124110Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:14:31.142367Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [3:183:2196] 2026-01-10T12:14:31.143384Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:183:2196] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous >> KqpScan::ScanDuringSplit10 >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous >> GenericFederatedQuery::YdbManagedSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007DFDF0AAA900 Leader for TabletID 
72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.453336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.453427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.453486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.453531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.453572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.453627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.453687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.453780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.454641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.454955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.538870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.538927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.553616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.553723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.553926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.562397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.562694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.563679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.563914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.566480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.566677Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.568001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.568061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.568205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.568261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.568312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.568645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.748928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.749686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.749793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.749856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.749901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.749954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.750484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 Leader for TabletID 72057594046678944 is [2:973:2933] sender: [2:1030:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 2026-01-10T12:14:32.001690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got 
periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 2026-01-10T12:14:32.044204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 0 2026-01-10T12:14:32.044482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-10T12:14:32.044568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-10T12:14:32.044645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-10T12:14:32.044710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DFDF0A41900, stats written 1 2026-01-10T12:14:32.045382Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:32.045698Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 360us result status StatusSuccess 2026-01-10T12:14:32.046291Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 152 LastUpdateTime: 152 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-10T12:12:37.315108Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1768047157315069 2026-01-10T12:12:37.688023Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704719319498025:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.688109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.707457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:37.797507Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: 
Direct read cache: : Created 2026-01-10T12:12:37.859879Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704718356970513:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:37.864226Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:37.918927Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.170038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.181077Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.343048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.351206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.428795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.428889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.429752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.429800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.451049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.486039Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:38.491027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.628693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:38.705702Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:38.825104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:38.859970Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:38.899397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:38.900108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002a04/r3tmp/yandexKJkuZv.tmp 2026-01-10T12:12:38.900120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002a04/r3tmp/yandexKJkuZv.tmp 2026-01-10T12:12:38.900328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002a04/r3tmp/yandexKJkuZv.tmp 2026-01-10T12:12:38.900412Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:38.970842Z INFO: TTestServer started on Port 1470 GrpcPort 13657 PQClient connected to localhost:13657 2026-01-10T12:12:39.222821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:42.066844Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704739831807236:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:42.066942Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704739831807247:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:42.067004Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:42.076146Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704739831807251:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:42.077180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:42.082028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:42.148364Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593704739831807250:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:12:42.362479Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593704739831807280:2140] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:42.694243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704719319498025:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:42.694336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:42.717764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:42.719791Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593704740794335808:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:42.721646Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODU5OTEzMTAtOTdhZmY0ZTgtNDI3Y2M2NGQtNGIxY2RmOGQ=, ActorId: [1:7593704740794335767:2329], ActorState: ExecuteState, LegacyTraceId: 01kekx5cwzdpdcfgtezm5s7tz4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:42.723740Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593704739831807295:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:42.724513Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NWRmM2NjYTItN2IwYmRjZWMtNThiYmY2NzktOGU4YTY1ODE=, ActorId: [2:7593704739831807234:2302], ActorState: ExecuteState, LegacyTraceId: 01kekx5cpf9tdw0jqbz792yay0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:42.730898Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severi ... lobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551388 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-10T12:14:28.685626Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-10T12:14:28.685703Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid e841abaa-1de141af-3c95c936-991f0fdf has messages 1 2026-01-10T12:14:28.685897Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 read done: guid# e841abaa-1de141af-3c95c936-991f0fdf, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2026-01-10T12:14:28.685941Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 response to read: guid# e841abaa-1de141af-3c95c936-991f0fdf 2026-01-10T12:14:28.686296Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 Process answer. Aval parts: 0 2026-01-10T12:14:28.687058Z :DEBUG: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:28.687404Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-01-10T12:14:28.687377Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 grpc read done: success# 1, data# { read { } } 2026-01-10T12:14:28.687577Z :DEBUG: [/Root] Take Data. Partition 0. 
Read: {0, 0} (0-0) 2026-01-10T12:14:28.687542Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 got read request: guid# b0cf64ef-b5b60244-8601a289-9b0ddbd 2026-01-10T12:14:28.687639Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-10T12:14:28.687672Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-10T12:14:28.687699Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-10T12:14:28.687758Z :DEBUG: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-01-10T12:14:28.687978Z :INFO: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] Closing read session. Close timeout: 0.000000s 2026-01-10T12:14:28.688049Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-10T12:14:28.688128Z :INFO: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 56 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:14:28.688266Z :NOTICE: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:14:28.688329Z :DEBUG: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] [null] Abort session to cluster 2026-01-10T12:14:28.688954Z :NOTICE: [/Root] [/Root] [19388fc2-c86c66d-103cea40-ef59bc6a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:14:28.689208Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0] Write session: close. Timeout = 0 ms 2026-01-10T12:14:28.689260Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0] Write session will now close 2026-01-10T12:14:28.689322Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0] Write session: aborting 2026-01-10T12:14:28.689846Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:14:28.689796Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 grpc read done: success# 0, data# { } 2026-01-10T12:14:28.689905Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0] Write session: destroy 2026-01-10T12:14:28.689827Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 grpc read failed 2026-01-10T12:14:28.689860Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 grpc closed 2026-01-10T12:14:28.689912Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_14232267640557214613_v1 is DEAD 2026-01-10T12:14:28.691528Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_14232267640557214613_v1 2026-01-10T12:14:28.691587Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7593705196673892570:2538] destroyed 2026-01-10T12:14:28.691646Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_14232267640557214613_v1 2026-01-10T12:14:28.691784Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593705196673892567:2535] disconnected. 
2026-01-10T12:14:28.691826Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593705196673892567:2535] disconnected; active server actors: 1 2026-01-10T12:14:28.691857Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593705196673892567:2535] client user disconnected session shared/user_11_1_14232267640557214613_v1 2026-01-10T12:14:28.693826Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0 grpc read done: success: 0 data: 2026-01-10T12:14:28.693854Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0 grpc read failed 2026-01-10T12:14:28.693887Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0 grpc closed 2026-01-10T12:14:28.693913Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|4e5a0763-cc68260a-e529427f-af918bb5_0 is DEAD 2026-01-10T12:14:28.694736Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:14:28.695434Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7593705170904088518:2483] destroyed 2026-01-10T12:14:28.695490Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:14:28.712823Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:28.712878Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.712901Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:28.712927Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.712947Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:28.713139Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:28.713161Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.713182Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:28.713205Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.713223Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:28.820230Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:28.820280Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.820303Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:28.820325Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.820341Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:28.924242Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:28.924296Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.924324Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:28.924402Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:28.924428Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:29.028239Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:29.028290Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:29.028316Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:29.028342Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:29.028360Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TSchemeShardSysNames::CreateOpsAreCovered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser >> TPQTest::TestPartitionTotalQuota >> 
GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> TPQTabletTests::Parallel_Transactions_1 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser >> TPQTabletTests::Parallel_Transactions_2 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:26.510970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:26.511065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.511106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:26.511149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:26.511212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:26.511254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-10T12:14:26.511360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:26.511473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:26.512344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:26.512708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:26.610080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:26.610155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:26.625910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:26.626050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:26.626222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:26.633301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:26.633539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:26.634093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:26.634265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:26.636565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.636767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:26.637849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:26.637903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:26.637958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:26.638006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:26.638047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:26.638298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:26.792051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.793968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.794043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.794133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.794206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:26.794276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
46678944, LocalPathId: 39] 2026-01-10T12:14:36.775852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:36.775897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:14:36.775945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:14:36.775982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:14:36.776895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.776964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:14:36.777089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:36.777154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:36.777216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:36.777255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:36.777304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:14:36.777354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:36.777418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:14:36.777465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:14:36.777643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:14:36.777703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-10T12:14:36.777767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 9 2026-01-10T12:14:36.777809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-10T12:14:36.779083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:36.779210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:36.779270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:36.779335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:14:36.779386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:14:36.780185Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-10T12:14:36.780375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:14:36.780430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:14:36.780526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:14:36.781200Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-10T12:14:36.781995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:36.782104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:36.782157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:36.782209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 9 2026-01-10T12:14:36.782260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:14:36.782408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:14:36.782782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:14:36.784207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:14:36.788190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:36.788425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:14:36.790848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:36.791020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:14:36.791148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:14:36.791717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:14:36.791803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:14:36.792415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:14:36.792548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:36.792603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1165:3068] TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:37.377250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:37.377592Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 408us result status StatusSuccess 2026-01-10T12:14:37.378164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TPQTabletTests::Partition_Send_Predicate_With_False >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:14:23.151332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:23.151421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.151461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:23.151501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:23.151562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:23.151618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:23.151680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:23.151794Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:23.152662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:23.152996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:23.249618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:14:23.249704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:23.286775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:23.286893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:23.287127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:23.308398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:23.308758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:23.309694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:23.309980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:23.313173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.313381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:23.314813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:23.314883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:23.314970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:23.315023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:23.315107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:23.315447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:23.551858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.556838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.557924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.558009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:14:23.558084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
37.617451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:37.618146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:37.618207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:37.618412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:14:37.618592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:37.618640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:14:37.618688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:14:37.619337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:37.619402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:14:37.619512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:37.619556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:14:37.619601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-10T12:14:37.621128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:37.621239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:37.621279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:37.621324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-10T12:14:37.621371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 
2026-01-10T12:14:37.626328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:37.626451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:14:37.626488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:14:37.626528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:14:37.626569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:14:37.626672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:14:37.631464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:14:37.631539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:37.631939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:14:37.632154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:37.632202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:37.632247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:14:37.632280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:37.632322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:14:37.632403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:824:2791] message: TxId: 103 2026-01-10T12:14:37.632455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:14:37.632497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:14:37.632531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:14:37.632641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 2 2026-01-10T12:14:37.633298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:37.633341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:14:37.633932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:37.638423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:14:37.638674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:37.638726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-10T12:14:37.639060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:14:37.639107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1749:3674] 2026-01-10T12:14:37.639783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-10T12:14:37.643543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:14:37.643804Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 297us result status StatusSuccess 2026-01-10T12:14:37.644730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] Test command err: Starting YDB, grpc: 16492, msgbus: 14911 2026-01-10T12:09:44.236287Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593703974583861120:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:09:44.236389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:09:44.767896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:09:44.814476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:44.814633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:44.867035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:44.893847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:44.956253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-10T12:09:45.271440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:09:45.271473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:09:45.271482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:09:45.271602Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:09:45.296668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:09:45.754162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593703974583861332:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:09:45.754208Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593703978878829456:2736] HANDLE EvNavigateScheme dc-1 2026-01-10T12:09:45.754506Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593703978878829456:2736] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.810353Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593703978878829456:2736] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-10T12:09:45.817181Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593703978878829456:2736] Handle TEvDescribeSchemeResult Forward to# [1:7593703978878829455:2735] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:09:45.828437Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703974583861332:2118] Handle TEvProposeTransaction 2026-01-10T12:09:45.828473Z 
node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703974583861332:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:09:45.828540Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703974583861332:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593703978878829461:2741] 2026-01-10T12:09:45.957123Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703978878829461:2741] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.957230Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703978878829461:2741] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.957258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703978878829461:2741] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.957323Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703978878829461:2741] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.957625Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703978878829461:2741] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.957992Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703978878829461:2741] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-10T12:09:45.958093Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703978878829461:2741] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-10T12:09:45.958236Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703978878829461:2741] txid# 281474976710657 HANDLE EvClientConnected 2026-01-10T12:09:45.958934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:45.969515Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703978878829461:2741] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-10T12:09:45.969575Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703978878829461:2741] txid# 281474976710657 SEND to# [1:7593703978878829460:2740] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-10T12:09:45.992957Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593703974583861332:2118] Handle TEvProposeTransaction 2026-01-10T12:09:45.992983Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593703974583861332:2118] TxId# 281474976710658 
ProcessProposeTransaction 2026-01-10T12:09:45.993011Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593703974583861332:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7593703978878829497:2774] 2026-01-10T12:09:45.995454Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593703978878829497:2774] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-10T12:09:45.995495Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7593703978878829497:2774] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-10T12:09:45.995509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7593703978878829497:2774] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-10T12:09:45.995560Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7593703978878829497:2774] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:45.995823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7593703978878829497:2774] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:45.995911Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7593703978878829497:2774] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:45.995964Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7593703978878829497:2774] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-10T12:09:45.996644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:09:45.998821Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7593703978878829497:2774] txid# 281474976710658 HANDLE EvClientConnected 2026-01-10T12:09:45.999678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7593703978878829497:2774] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-10T12:09:45.999720Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7593703978878829497:2774] txid# 281474976710658 SEND to# [1:7593703978878829496:2773] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-10T12:09:48.344795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593703991763731537:2333], DatabaseId: /dc-1, PoolId: default, Failed ... 
d_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/partition_stats' 2026-01-10T12:14:21.329886Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/query_metrics_one_minute' 2026-01-10T12:14:21.329914Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_read_bytes_one_hour' 2026-01-10T12:14:21.329945Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_duration_one_hour' 2026-01-10T12:14:21.329978Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/compile_cache_queries' 2026-01-10T12:14:21.330007Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_request_units_one_minute' 2026-01-10T12:14:21.330036Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_read_bytes_one_minute' 2026-01-10T12:14:21.330099Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/resource_pool_classifiers' 2026-01-10T12:14:21.330142Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/nodes' 2026-01-10T12:14:21.330171Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_cpu_time_one_hour' 2026-01-10T12:14:21.330203Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_one_minute' 2026-01-10T12:14:21.330230Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_permissions' 2026-01-10T12:14:21.330257Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 
72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_groups' 2026-01-10T12:14:21.330285Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_effective_permissions' 2026-01-10T12:14:21.330313Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_owners' 2026-01-10T12:14:21.330342Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/tables' 2026-01-10T12:14:21.330371Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/query_sessions' 2026-01-10T12:14:21.330399Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_by_tli_one_minute' 2026-01-10T12:14:21.330430Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_cpu_time_one_minute' 2026-01-10T12:14:21.348250Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725674 2026-01-10T12:14:21.348431Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725675 2026-01-10T12:14:21.348475Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725677 2026-01-10T12:14:21.348519Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725676 2026-01-10T12:14:21.348562Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725679 2026-01-10T12:14:21.348603Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725678 2026-01-10T12:14:21.348723Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_by_tli_one_hour' 2026-01-10T12:14:21.348779Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle 
TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_request_units_one_hour' 2026-01-10T12:14:21.348828Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/pg_tables' 2026-01-10T12:14:21.348866Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_one_hour' 2026-01-10T12:14:21.348899Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_group_members' 2026-01-10T12:14:21.348938Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_duration_one_minute' 2026-01-10T12:14:21.348989Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [60:7593705163421124213:2296] at schemeshard: 72075186224037891 Send TEvRosterUpdateFinished 2026-01-10T12:14:21.519469Z node 60 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:24.319445Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7593705155524397369:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:24.319554Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:25.501191Z node 60 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[60:7593705163421123771:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:25.501321Z node 60 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/tenant-db/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:26.300523Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2026-01-10T12:14:26.301388Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:14:26.307346Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:26.678335Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kekx8df5fzp2f116jrejftjy", Request deadline has expired for 0.532206s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23482 TBackTrace::Capture()+28 (0x1B14858C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B637A5C) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD20A40) void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant(NUnitTest::TTestContext&)+3284 (0x1ADA7534) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD7D1A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B6706EA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B63E738) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::TCurrentTest::Execute()+1300 (0x1AD7C4E4) NUnitTest::TTestFactory::Execute()+2176 (0x1B63FEF0) NUnitTest::RunMain(int, char**)+5805 (0x1B66A54D) ??+0 (0x7FF881E90D90) __libc_start_main+128 (0x7FF881E90E40) _start+41 (0x1871E029) |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> TPQTabletTests::Multiple_PQTablets_1 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> DqUnboxedValueToNativeArrowConversion::Struct [GOOD] >> DqUnboxedValueToNativeArrowConversion::Tuple >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a >> TPQTabletTests::Multiple_PQTablets_2 >> DqUnboxedValueToNativeArrowConversion::Tuple [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalListOfOptional [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration |89.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] >> DqOutputChannelTests::SingleRead [GOOD] >> DqOutputChannelTests::PopAll [GOOD] >> DqOutputChannelWithStorageTests::Spill [GOOD] >> DqOutputChannelWithStorageTests::Overflow [GOOD] >> DqOutputWideChannelTests::Overflow [GOOD] >> DqOutputWideChannelTests::BigRow >> TPQTabletTests::One_Tablet_For_All_Partitions |89.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.1%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> DqOutputWideChannelTests::BigRow [GOOD] >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] Test command err: 2026-01-10T12:14:37.922469Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.009449Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:14:38.014258Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:14:38.014703Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:38.014780Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:38.014841Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:14:38.014897Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:14:38.014964Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:38.015054Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:14:38.052314Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-10T12:14:38.052509Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:14:38.079208Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:38.083875Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 
10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:38.084087Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:38.085363Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:38.085575Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:14:38.086066Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:14:38.086565Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-10T12:14:38.087612Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:14:38.087678Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:14:38.087736Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-10T12:14:38.087800Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:14:38.087874Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:14:38.088605Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:38.089143Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:14:38.089204Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:38.089247Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:38.089299Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:38.089338Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:14:38.089391Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:38.089441Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:38.089518Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:14:38.089566Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:14:38.089606Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:38.089636Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-10T12:14:38.089659Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-10T12:14:38.089689Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:14:38.089725Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:14:38.089764Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:38.090090Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:38.090132Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:38.090206Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:14:38.090420Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:14:38.090647Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:38.094273Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:14:38.094398Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:14:38.094467Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:38.094505Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:38.094543Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:38.094584Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:38.094629Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:38.094683Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:38.095235Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-10T12:14:38.101044Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-10T12:14:38.102128Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-10T12:14:38.102233Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-10T12:14:38.102339Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-10T12:14:38.102389Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-10T12:14:38.102436Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-10T12:14:38.102480Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-10T12:14:38.102539Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... 
TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-10T12:14:43.011529Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-10T12:14:43.011781Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-10T12:14:43.011841Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-10T12:14:43.011920Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-10T12:14:43.011970Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.012017Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:14:43.012135Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.012196Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:14:43.012298Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-10T12:14:43.012357Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-10T12:14:43.012401Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:14:43.012446Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:14:43.012495Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.012808Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:14:43.015963Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:14:43.016127Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:14:43.016286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.016326Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.016365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.016540Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.016582Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.016633Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:43.016752Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-10T12:14:43.016806Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:14:43.016866Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:14:43.016914Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:14:43.016967Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:14:43.017015Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-10T12:14:43.017059Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-10T12:14:43.017106Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-10T12:14:43.017181Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-10T12:14:43.017244Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:14:43.017303Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:14:43.017342Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:14:43.017379Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 
72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:14:43.017417Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:14:43.017458Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-10T12:14:43.017506Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-10T12:14:43.017553Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-10T12:14:43.017592Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:14:43.017631Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-10T12:14:43.041464Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.076364Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.076448Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.076494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.076546Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.076594Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.089488Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.113391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.113461Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.113496Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.113534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.113563Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.113710Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.124858Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.160360Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.160429Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-10T12:14:43.160466Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.160504Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.160534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.184352Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.195677Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.195765Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.195806Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.195847Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.195880Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.219732Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:43.219813Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.219850Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:43.219892Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:43.219925Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:43.232392Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.256345Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.268717Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:388:2332], now have 1 active actors on pipe |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> 
NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> DqOutputWideChannelTests::SingleRead [GOOD] >> DqOutputWideChannelTests::PartialRead [GOOD] >> DqOutputWideChannelTests::PopAll [GOOD] >> DqOutputWideChannelWithStorageTests::Spill [GOOD] >> DqOutputWideChannelWithStorageTests::Overflow [GOOD] >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple >> TPQTabletTests::Limit_On_The_Number_Of_Transactons >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval >> ConvertUnboxedValueToArrowAndBack::Tuple >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> ConvertUnboxedValueToArrowAndBack::Tuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverTupleWithOptionals [GOOD] >> DqOutputChannelTests::PartialRead [GOOD] >> DqOutputChannelTests::Overflow [GOOD] >> DqOutputChannelTests::BigRow >> DqOutputChannelTests::BigRow [GOOD] >> DqOutputChannelTests::ChunkSizeLimit [GOOD] >> Channels20::LocalChannelBackPressure >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous >> 
GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] Test command err: 2026-01-10T12:14:41.999462Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:42.079355Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:14:42.083994Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:14:42.084455Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:42.084544Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:42.084599Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:14:42.084695Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:14:42.084766Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:42.084858Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:14:42.123542Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-10T12:14:42.123714Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:14:42.149178Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:42.157639Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY 
AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:42.157853Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:42.159106Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-10T12:14:42.159313Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:14:42.159838Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:14:42.160369Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-10T12:14:42.161440Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:14:42.161497Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:14:42.161545Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-10T12:14:42.161599Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:14:42.161689Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:14:42.162308Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:42.162751Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:14:42.162799Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:42.162839Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:42.162893Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:42.162930Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:14:42.162964Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:42.163046Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:42.163132Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:14:42.163173Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:14:42.163208Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:14:42.163234Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-10T12:14:42.163257Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-10T12:14:42.163285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:14:42.163335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:14:42.163385Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:42.163763Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:42.163809Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:14:42.163875Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:14:42.164070Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:14:42.164285Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:42.169283Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:14:42.169408Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:14:42.169475Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:42.169514Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:42.169547Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:42.169598Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:42.169638Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:42.169687Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:42.170147Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-10T12:14:42.170843Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-10T12:14:42.171982Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-10T12:14:42.172105Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-10T12:14:42.172198Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-10T12:14:42.172254Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-10T12:14:42.172303Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-10T12:14:42.172362Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-10T12:14:42.172423Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... 
0001} generation 2 [6:225:2142] 2026-01-10T12:14:47.700554Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:14:47.700610Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:14:47.700651Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2026-01-10T12:14:47.700689Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:14:47.700728Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.700762Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:47.700808Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.700849Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:14:47.700970Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:14:47.701158Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:14:47.701384Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|5d0520b5-961e1be4-967d62da-c5d49e9d_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-10T12:14:47.701460Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:14:47.701502Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:14:47.701549Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:14:47.701589Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:47.701641Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:14:47.701708Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:14:47.701751Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-10T12:14:47.701811Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:14:47.701873Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-10T12:14:47.701986Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2026-01-10T12:14:47.702739Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-10T12:14:47.702808Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-10T12:14:47.703295Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-10T12:14:47.703853Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:14:47.703907Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. 
Delete blobs from D0000100001(+) to D0000100002(-) 2026-01-10T12:14:47.712034Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:14:47.712248Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:14:47.712325Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:14:47.712376Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.712417Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:47.712464Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.712507Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:14:47.712589Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:14:47.712979Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. CacheSize 0 CachedBlobs 0 2026-01-10T12:14:47.713111Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-10T12:14:47.713171Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-10T12:14:47.713219Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-10T12:14:47.713303Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:14:47.718345Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:14:47.740365Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:47.777219Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:47.777301Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.777351Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:47.777397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.777443Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:47.789402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:14:47.789482Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.789516Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:47.789555Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:47.789606Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-10T12:14:47.912650Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:14:47.924597Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:14:48.251826Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:48.275059Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:48.275156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.275192Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.275227Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.275260Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:14:48.275395Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:48.287513Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:14:48.287594Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.287629Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.287668Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.287701Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-10T12:14:48.304461Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::ChunkSizeLimit [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} 
ydb/library/yql/dq/runtime/ut/unittest |89.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |89.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TestArrowBlockSplitter::SplitLargeBlock [GOOD] >> TestArrowBlockSplitter::SplitWithScalars [GOOD] >> TestArrowBlockSplitter::PassSmallBlock [GOOD] >> TestArrowBlockSplitter::CheckLargeRows [GOOD] >> TestArrowBlockSplitter::CheckLargeScalarRows [GOOD] >> HashShuffle::BackPressureWithSpillingMulti [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |89.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingMulti [GOOD] |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.2%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2026-01-10T12:13:47.060350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705019042298374:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:47.060417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:47.078126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:13:47.200215Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:47.405336Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:13:47.656519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:47.656944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:47.657068Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:13:47.951560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:48.029397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:48.082957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:48.083080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:48.093251Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:48.093330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:48.209944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:48.225733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:48.271357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:48.274750Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:13:48.281621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:48.370951Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:48.439177Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.016144s 2026-01-10T12:13:48.612274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:48.616160Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:48.728942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003607/r3tmp/yandexbiXGr6.tmp 2026-01-10T12:13:48.728972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003607/r3tmp/yandexbiXGr6.tmp 2026-01-10T12:13:48.729162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003607/r3tmp/yandexbiXGr6.tmp 2026-01-10T12:13:48.729275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:48.840402Z INFO: TTestServer started on Port 61561 GrpcPort 62721 PQClient connected to localhost:62721 2026-01-10T12:13:49.195400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:49.371055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:52.060567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705019042298374:2183];send_to=[0:7307199536658146131:7762515]; 
2026-01-10T12:13:52.060668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:13:52.982635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705040517136159:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:52.982763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:52.986492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705040517136175:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:52.986568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705040517136176:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:52.986753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:13:52.991603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:13:53.043124Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705040517136179:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:13:53.412247Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705044812103560:3073] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:13:53.446836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:53.476194Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008106s 2026-01-10T12:13:53.598220Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705044812103576:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:13:53.601300Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmE3ZTQ5ODgtNTI2YTIyNi1jN2VjZTU2ZC0zNzQ3NjM1Nw==, ActorId: [1:7593705040517136155:2332], ActorState: ExecuteState, LegacyTraceId: 01kekx7hxm7ta3vb7jxv3ksn2w, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:13:53.628202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:53.633703Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:13:53.794467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESch ... 
: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.761908Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.761920Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:48.856231Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:48.856277Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.856295Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.856318Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.856336Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:48.866342Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:48.866384Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.866401Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.866424Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.866442Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:48.866494Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:48.866505Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.866514Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.866526Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.866534Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:48.996197Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:48.996245Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.996262Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:48.996286Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:48.996303Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:49.000438Z node 7 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:49.000474Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.000491Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.000512Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.000532Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:49.000580Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:49.000593Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.000602Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.000615Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.000624Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:49.097772Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:49.097818Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.097839Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.097866Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.097886Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:49.107395Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:49.107447Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.107467Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.107493Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.107512Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:49.107572Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:49.107585Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.107595Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.107610Z node 7 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.107620Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:49.200509Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:49.200560Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.200580Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.200618Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.200637Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:49.207385Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:49.207427Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.207452Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.207478Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.207498Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:49.207555Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:49.207568Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.207579Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.207592Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.207603Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-10T12:14:49.302529Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:49.302579Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.302600Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.302626Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.302645Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:14:49.308557Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-10T12:14:49.308597Z node 7 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.308615Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.308649Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.308665Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-10T12:14:49.308715Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-10T12:14:49.308729Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.308741Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:49.308755Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:49.308763Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin |89.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval [GOOD] >> DqUnboxedValueDoNotFitToArrow::OptionalOfOptional [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] >> ConvertUnboxedValueToArrowAndBack::Struct [GOOD] >> ConvertUnboxedValueToArrowAndBack::ListOfJsons [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalListOfOptional >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser >> ConvertUnboxedValueToArrowAndBack::OptionalListOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::LargeVariant >> Channels20::IcChannelLateBinding >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |89.2%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |89.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> 
NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |89.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous >> KqpYql::TestUuidDefaultColumn |89.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |89.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |89.2%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DqUnboxedValueToNativeArrowConversion::VariantOverTupleWithOptionals [GOOD] >> HashShuffle::BackPressureInMemory [GOOD] >> HashShuffle::BackPressureInMemoryMulti [GOOD] >> HashShuffle::BackPressureInMemoryLoad |89.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous |89.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |89.3%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::BrokenCompressedData [GOOD] Test command err: 2026-01-10T12:12:40.933477Z :ReadSession INFO: Random seed for debugging is 1768047160933436 2026-01-10T12:12:41.289868Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704737068518131:2081];send_to=[0:7307199536658146131:7762515]; 
2026-01-10T12:12:41.294157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:41.376514Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:41.500609Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:41.737706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:41.739263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:41.744338Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:42.005890Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:42.034055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:42.059376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:42.059567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:42.061016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:42.061088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:42.109465Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:42.109636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.147742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.260960Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:42.296252Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:42.448486Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:42.464309Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:42.585181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:42.932630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0029e2/r3tmp/yandexXBTT2e.tmp 2026-01-10T12:12:42.932664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0029e2/r3tmp/yandexXBTT2e.tmp 2026-01-10T12:12:42.932839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0029e2/r3tmp/yandexXBTT2e.tmp 2026-01-10T12:12:42.932949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:43.092324Z INFO: TTestServer started on Port 64910 GrpcPort 14472 PQClient connected to localhost:14472 2026-01-10T12:12:43.464416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:43.483006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:46.276213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704737068518131:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:46.276274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:46.778837Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704757443965334:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.778941Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.779053Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704757443965339:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.780090Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704757443965348:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.780177Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.791021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:46.823691Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593704757443965349:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-10T12:12:46.971889Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593704757443965380:2144] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:47.258425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:47.268260Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593704757443965387:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:47.268928Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODQyNzkyZDgtMmRiOGM1YmUtZmI2NDMyYzUtYTQxYmY1ZGY=, ActorId: [2:7593704757443965331:2302], ActorState: ExecuteState, LegacyTraceId: 01kekx5h9d97qvgqntdefajtzk, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:47.268947Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593704758543356002:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:47.270407Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzM1NjQzZi0yMDU1YmU0Ni05NzQwZTFjZi04OTcwYjBkZg==, ActorId: [1:7593704758543355951:2330], ActorState: ExecuteState, LegacyTraceId: 01kekx5hcwamtm19nbapazbmkf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:47.271731Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permiss ... d/user session shared/user_13_1_12345595644779641422_v1 response to read: guid# 5063fe6-cd823094-95321686-bc405d6f 2026-01-10T12:14:53.273161Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 Process answer. Aval parts: 0 2026-01-10T12:14:53.274718Z :DEBUG: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:53.275477Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 grpc read done: success# 1, data# { read { } } 2026-01-10T12:14:53.275615Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 got read request: guid# 1ce2344-655adf0-a2204617-c7884fa6 2026-01-10T12:14:53.280111Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2026-01-10T12:14:53.280353Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2026-01-10T12:14:53.280446Z :DEBUG: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2026-01-10T12:14:52.108000Z WriteTime: 2026-01-10T12:14:52.120000Z Ip: "ipv6:[::1]:53744" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:53744" } } } } 2026-01-10T12:14:53.280731Z :INFO: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] Closing read session. 
Close timeout: 3.000000s 2026-01-10T12:14:53.280792Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-10T12:14:53.280851Z :INFO: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3700 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:14:53.281694Z :INFO: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] Closing read session. Close timeout: 0.000000s 2026-01-10T12:14:53.281758Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-10T12:14:53.281822Z :INFO: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3701 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:14:53.281972Z :NOTICE: [/Root] [/Root] [61dd00f4-5e0a4106-a6d92162-39abc8d0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:14:53.285650Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 grpc read done: success# 0, data# { } 2026-01-10T12:14:53.285687Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 grpc read failed 2026-01-10T12:14:53.285728Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 grpc closed 2026-01-10T12:14:53.285772Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_13_1_12345595644779641422_v1 is DEAD 2026-01-10T12:14:53.287592Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_13_1_12345595644779641422_v1 2026-01-10T12:14:53.287647Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [13:7593705285057512808:2513] destroyed 2026-01-10T12:14:53.287712Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_13_1_12345595644779641422_v1 2026-01-10T12:14:53.287957Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [13:7593705285057512805:2510] disconnected. 2026-01-10T12:14:53.287998Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [13:7593705285057512805:2510] disconnected; active server actors: 1 2026-01-10T12:14:53.288027Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [13:7593705285057512805:2510] client user disconnected session shared/user_13_1_12345595644779641422_v1 2026-01-10T12:14:53.331740Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:53.331788Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.331811Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:53.331837Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.331861Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:53.436091Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:53.436130Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.436147Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:53.436170Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.436188Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:53.536477Z node 14 :PERSQUEUE 
DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:53.536513Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.536531Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:53.536552Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.536569Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:53.637283Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:14:53.637325Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.637341Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:14:53.637363Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:14:53.637383Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:14:53.926140Z node 13 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [13:7593705302237382188:2538] TxId: 281474976715683. Ctx: { TraceId: 01kekx9cp89n7rh85hf6vcryhg, Database: /Root, SessionId: ydb://session/3?node_id=13&id=YTY4Y2NmYjctZDE3ZGE2ZjgtY2ZlMGUwOC03MGNjOTMwYg==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 14 trace_id# 2026-01-10T12:14:53.926328Z node 13 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [13:7593705302237382192:2538], TxId: 281474976715683, task: 3. Ctx: { CheckpointId : . TraceId : 01kekx9cp89n7rh85hf6vcryhg. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=13&id=YTY4Y2NmYjctZDE3ZGE2ZjgtY2ZlMGUwOC03MGNjOTMwYg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [13:7593705302237382188:2538], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2026-01-10T12:14:56.671758Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:56.671815Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:56.671877Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:14:56.700566Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:14:56.701282Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:14:56.701594Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:14:56.707831Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:14:56.716397Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:14:56.717275Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-10T12:14:56.717434Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-10T12:14:56.717693Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:14:56.717774Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:14:56.717818Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:14:56.717898Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } |89.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 5634, MsgBus: 13400 2026-01-10T12:13:46.516765Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705014867359426:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:46.517109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:47.055366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:47.057359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:47.059131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:47.070452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:47.194480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705014867359201:2081] 1768047226445507 != 1768047226445510 2026-01-10T12:13:47.195969Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:47.273992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:47.274016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:47.274026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:47.274135Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:47.331337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:47.509343Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:48.120496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:48.120883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:48.122905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:48.123610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:48.130519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047228178, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:48.131526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:13:48.131573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-10T12:13:48.131774Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705019162327034:2257] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:13:48.132187Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705014867359169:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.132294Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705014867359172:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.132359Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705014867359175:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.132579Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019162326965:2216][/Root] Path was updated to new version: owner# [1:7593705014867359487:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.132960Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705019162327034:2257] Ack update: ack to# [1:7593705014867359560:2149], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:48.133102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:13:48.133660Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019162327083:2288][/Root] Path was updated to new version: owner# [1:7593705019162327077:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.133860Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019162327084:2289][/Root] Path was updated to new version: owner# [1:7593705019162327078:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:50.954090Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:50.955608Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010e1/r3tmp/spilling-tmp-runner/node_1_ddcf02b5-94444878-e44b827e-3ca332d3, actor: [1:7593705032047229230:2307] 2026-01-10T12:13:50.955789Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010e1/r3tmp/spilling-tmp-runner 2026-01-10T12:13:50.962308Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7d7p6kjk3fhyfq2d6d0x", Request has 18444976026478.589344s seconds to be completed 2026-01-10T12:13:50.967025Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx7d7p6kjk3fhyfq2d6d0x", Created new session, sessionId: ydb://session/3?node_id=1&id=YzM0MTViZTctZWEzZGI3MjctYmM2NjY5MjEtMTQ3MWMwMA==, workerId: [1:7593705032047229250:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:13:50.967251Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7d7p6kjk3fhyfq2d6d0x 2026-01-10T12:13:50.967316Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:50.967348Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-10T12:13:50.967374Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:50.968949Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705032047229254:2498][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705014867359487:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:50.969842Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705032047229253:2497][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705014867359487:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:13:50.981120640 229038 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:50.981297579 229038 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:13:51.011363083 229037 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.011518378 229037 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.015041Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705032047229269:2500][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705014867359487:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:51.021602Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:51.037263776 229037 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.037406920 229037 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.041820Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:13:51.045904Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7g2506y0jn2x73vd26nh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzM0MTViZTctZWEzZGI3MjctYmM2NjY5MjEtMTQ3MWMwMA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705032047229250:2326] 2026-01-10T12:13:51.045958Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7593705036342196574:2502] 2026-01-10T12:13:51.047698Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705036342196576:2503][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7593705014867359487:2119], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: 2026-01-10T12:14:55.591299Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705311129968856:2345] TxId: 281474976710662. Ctx: { TraceId: 01kekx9ejd8avmkfkg0gyn2pce, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OTQ2MzIyMWItOTk5YjUzNDktZTE0MGVjNDctYzVkZGY5YTI=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:14:55.597874Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9ejd8avmkfkg0gyn2pce, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OTQ2MzIyMWItOTk5YjUzNDktZTE0MGVjNDctYzVkZGY5YTI=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7593705311129968860:2360] 2026-01-10T12:14:55.598891Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9ejd8avmkfkg0gyn2pce, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OTQ2MzIyMWItOTk5YjUzNDktZTE0MGVjNDctYzVkZGY5YTI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705311129968861:2361] 2026-01-10T12:14:55.599839Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9ejd8avmkfkg0gyn2pce", Forwarded response to sender actor, requestId: 4, sender: [9:7593705311129968753:2344], selfId: [9:7593705272475262252:2171], source: [9:7593705311129968754:2345] 2026-01-10T12:14:55.601452Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=OTQ2MzIyMWItOTk5YjUzNDktZTE0MGVjNDctYzVkZGY5YTI=, workerId: [9:7593705311129968754:2345], local sessions count: 0 E0110 12:14:55.817302514 250714 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:55.817527506 250714 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:55.863978Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:53.821402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:53.935901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:53.946409Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:53.946911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:53.947078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:54.335051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:54.335202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:54.341625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046990526952 != 1768046990526955 2026-01-10T12:09:54.352735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:54.400515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:54.507351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:54.748940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:54.998369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:55.140271Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:55.140357Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:55.140459Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-10T12:09:55.266229Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:55.266328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:55.266966Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:55.267065Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:55.267466Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:55.267589Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:55.267751Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:55.268047Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:55.269825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:55.270978Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:55.271054Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:55.305858Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:55.307076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:55.307374Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2026-01-10T12:09:55.307715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:55.365207Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:55.366058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:55.366200Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:55.368010Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:55.368125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:55.368262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:55.368678Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:55.368832Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:55.368940Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-10T12:09:55.379829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:55.432115Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:55.432343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:55.432472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-10T12:09:55.432516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:55.432551Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:55.432610Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:55.432855Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:55.432912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:55.433309Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:55.433420Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:55.433504Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:55.433552Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:55.433603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:55.433639Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:55.433675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:55.433708Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:55.433770Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:55.434225Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:55.434274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:55.434331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:887:2767], serverId# [1:892:2770], sessionId# [0:0:0] 2026-01-10T12:09:55.434420Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 
2026-01-10T12:09:55.434455Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:55.434559Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:55.434815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:55.434881Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:55.434965Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transacti ... d with status OK role: Leader [27:1173:2971] 2026-01-10T12:14:53.109941Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [27:1173:2971] 2026-01-10T12:14:53.110055Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1173:2971] 2026-01-10T12:14:53.110220Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72075186224037888] shutdown pipe due to pending shutdown request [27:1173:2971] 2026-01-10T12:14:53.110323Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [27:1173:2971] 2026-01-10T12:14:53.110654Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [27:1172:2970], Recipient [27:918:2782]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-10T12:14:53.110827Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2026-01-10T12:14:53.110990Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:14:53.111172Z node 27 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2026-01-10T12:14:53.111399Z node 27 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [27:1172:2970], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2026-01-10T12:14:53.111571Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:14:53.111722Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:14:53.112416Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2026-01-10T12:14:53.112562Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:16} starting compaction 2026-01-10T12:14:53.113103Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} starting Scan{1 on 1001, Compact{72075186224037888.1.16, eph 1}} 2026-01-10T12:14:53.113301Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} started compaction 1 2026-01-10T12:14:53.113412Z node 27 :TABLET_EXECUTOR 
DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 10610073991673364614 2026-01-10T12:14:53.225472Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 16, product {tx status + 1 parts epoch 2} done 2026-01-10T12:14:53.225991Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2026-01-10T12:14:53.226465Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2026-01-10T12:14:53.226584Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2026-01-10T12:14:53.227253Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.527387Z 2026-01-10T12:14:53.227499Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2026-01-10T12:14:53.227652Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:14:53.227814Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-10T12:14:53.227955Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [27:1172:2970]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:14:53.228866Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2026-01-10T12:14:53.229030Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 16893947808288796996 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 11610892589619072348 ========= Starting an immediate read ========= 2026-01-10T12:14:53.507208Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037888] send [27:1107:2916] 2026-01-10T12:14:53.507351Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1107:2916] 2026-01-10T12:14:53.508181Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [27:1197:2978], Recipient [27:918:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-10T12:14:53.508491Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-10T12:14:53.508656Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:14:53.508864Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:14:53.508982Z node 27 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:14:53.509109Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2502/18446744073709551615 2026-01-10T12:14:53.509267Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-10T12:14:53.509472Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:14:53.509588Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:14:53.509680Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:14:53.509763Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:14:53.509845Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-10T12:14:53.509944Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:14:53.509986Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:14:53.510021Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:14:53.510052Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:14:53.510258Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:14:53.510595Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is DelayComplete 2026-01-10T12:14:53.510670Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:14:53.510781Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:14:53.510866Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:14:53.510937Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:14:53.510969Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:14:53.511016Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-10T12:14:53.511117Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:14:53.511288Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:14:53.511445Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:14:53.632683Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:14:53.632881Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:14:53.633255Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{16, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-10T12:14:53.633442Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:14:53.634421Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2026-01-10T12:14:53.634824Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [27:551:2486] 2026-01-10T12:14:53.634937Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [27:551:2486] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 27923, MsgBus: 5611 2026-01-10T12:13:47.538670Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705020136536901:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:47.548992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:47.876198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:47.883280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:47.883414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:48.030320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:48.113958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:48.136579Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705020136536791:2081] 1768047227523376 != 1768047227523379 2026-01-10T12:13:48.140161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:48.271844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:48.271873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:48.271881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:48.271983Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:48.554611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:48.915798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:48.916301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:48.923839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:48.931880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:48.934479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047228983, transactions count in step: 1, at 
schemeshard: 72057594046644480 2026-01-10T12:13:48.935941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:13:48.936021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-10T12:13:48.936307Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705020136537321:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:13:48.936794Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705020136536759:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.936938Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705020136536762:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.937000Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705020136536765:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.937233Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705024431504754:2289][/Root] Path was updated to new version: owner# [1:7593705024431504748:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.937387Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705020136537244:2211][/Root] Path was updated to new version: owner# [1:7593705020136537072:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.937508Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705024431504753:2288][/Root] Path was updated to new version: owner# [1:7593705024431504747:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.937897Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705020136537321:2247] Ack update: ack to# [1:7593705020136537151:2151], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:48.938077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication 
complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 E0110 12:13:51.905218281 229690 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.905443749 229690 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:13:51.913272161 229690 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.913417656 229690 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.895636Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:51.915568Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010de/r3tmp/spilling-tmp-runner/node_1_fe876aee-70ce0dad-910573c2-8d84f70c, actor: [1:7593705037316406823:2307] 2026-01-10T12:13:51.919421Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7e0z29v8r5ad2sj29dgc", Request has 18444976026477.632252s seconds to be completed 2026-01-10T12:13:51.921740Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010de/r3tmp/spilling-tmp-runner 2026-01-10T12:13:51.926290Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx7e0z29v8r5ad2sj29dgc", Created new session, sessionId: ydb://session/3?node_id=1&id=YzIxOTA1ZjctM2I1MTNlNTYtMzM4YmYxYmEtNmUzNGRmMmI=, workerId: [1:7593705037316406843:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:13:51.926541Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7e0z29v8r5ad2sj29dgc 2026-01-10T12:13:51.926672Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:51.926703Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:13:51.926733Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:51.965428Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:13:51.966511Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705037316406848:2498][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705020136537072:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:51.967294Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705037316406849:2499][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705020136537072:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:51.967487Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705037316406850:2500][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705020136537072:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:13:51.974078588 229689 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.974236035 229689 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.980725Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:13:51.983151Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7gzdahjpbtyz4qdpaqrf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxOTA1ZjctM2I1MTNlNTYtMzM4YmYxYmEtNmUzNGRmMmI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705037316406843:2326] 2026-01-10T12:13:51.983222Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7593705037316406872:2503] 2026-01-10T12:13:51.988961Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705037316406875:2504][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7593705020136537072:2114], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 2026-01-10T12:14:55.282789Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705312542659736:2347] TxId: 281474976715662. Ctx: { TraceId: 01kekx9e5j9rkg7n8808nwd5km, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjNjYThjMWUtZjVmYzMyNTAtYjE1MDZhMDEtNWQ1ZWFiNzk=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:14:55.291928Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. 
Ctx: { TraceId: 01kekx9e5j9rkg7n8808nwd5km, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjNjYThjMWUtZjVmYzMyNTAtYjE1MDZhMDEtNWQ1ZWFiNzk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705312542659740:2360] 2026-01-10T12:14:55.292139Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kekx9e5j9rkg7n8808nwd5km, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjNjYThjMWUtZjVmYzMyNTAtYjE1MDZhMDEtNWQ1ZWFiNzk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705312542659741:2361] 2026-01-10T12:14:55.293403Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9e5j9rkg7n8808nwd5km", Forwarded response to sender actor, requestId: 4, sender: [9:7593705308247692340:2346], selfId: [9:7593705269592985932:2265], source: [9:7593705308247692341:2347] 2026-01-10T12:14:55.295545Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MjNjYThjMWUtZjVmYzMyNTAtYjE1MDZhMDEtNWQ1ZWFiNzk=, workerId: [9:7593705308247692341:2347], local sessions count: 0 E0110 12:14:55.658024509 250155 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:55.658237951 250155 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpYql::NonStrictDml >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |89.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:09:50.295078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:09:50.428779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:09:50.440055Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:09:50.440669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:09:50.440816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:09:51.064882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:09:51.065030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:09:51.071587Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768046987243848 != 1768046987243851 2026-01-10T12:09:51.085974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:09:51.137245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:09:51.274957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:09:51.494860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:09:51.771798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:09:51.927965Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:09:51.928039Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:09:51.928212Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-10T12:09:52.082862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:09:52.083025Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:09:52.083750Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:09:52.083881Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:09:52.084374Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:09:52.084527Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:09:52.084690Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:09:52.085067Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:09:52.087072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:09:52.088425Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:09:52.088534Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:09:52.128008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:09:52.133395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:09:52.134277Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2026-01-10T12:09:52.134597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:09:52.211851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:09:52.215533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:09:52.215704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:09:52.218085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:09:52.218218Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:09:52.218289Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:09:52.218776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:09:52.218954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:09:52.219072Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-10T12:09:52.234966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:09:52.298231Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:09:52.298463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:09:52.298604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-10T12:09:52.298652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:09:52.298692Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:09:52.298756Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:09:52.299003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:52.299076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:09:52.299520Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:09:52.299628Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:09:52.299698Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:09:52.299758Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:09:52.299810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:09:52.299849Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:09:52.299885Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:09:52.299922Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:09:52.299983Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:09:52.300474Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:52.300529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:09:52.300590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:887:2767], serverId# [1:892:2770], sessionId# [0:0:0] 2026-01-10T12:09:52.300679Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 
2026-01-10T12:09:52.300724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:09:52.300831Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:09:52.301082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:09:52.301143Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:09:52.301234Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transacti ... 37: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:14:55.968791Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:14:55.968925Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-10T12:14:55.969227Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-10T12:14:55.969286Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[29:1291:3056], 1} after executionsCount# 1 2026-01-10T12:14:55.969340Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[29:1291:3056], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:14:55.969425Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[29:1291:3056], 1} finished in read 2026-01-10T12:14:55.969492Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-10T12:14:55.969534Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:14:55.969570Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:14:55.969607Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:14:55.969664Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-10T12:14:55.969694Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:14:55.969729Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037889 has finished 2026-01-10T12:14:55.969767Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:14:55.969872Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, 
NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:14:55.969937Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:14:55.969986Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:14:55.970420Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037889] send [29:1145:2944] 2026-01-10T12:14:55.970467Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037889] push event to server [29:1145:2944] 2026-01-10T12:14:55.970630Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1291:3056], Recipient [29:927:2787]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:14:55.970688Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } 2026-01-10T12:14:55.970956Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037890] ::Bootstrap [29:1293:3059] 2026-01-10T12:14:55.971080Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037890] lookup [29:1293:3059] 2026-01-10T12:14:55.971283Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037890] queue send [29:1293:3059] 2026-01-10T12:14:55.971399Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037890] forward result local node, try to connect [29:1293:3059] 2026-01-10T12:14:55.971450Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037890]::SendEvent [29:1293:3059] 2026-01-10T12:14:55.971685Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [29:1294:3060], Recipient [29:1248:3028]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:14:55.971731Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:14:55.971778Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [29:1293:3059], serverId# [29:1294:3060], sessionId# [0:0:0] 2026-01-10T12:14:55.971834Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037890] connected with status OK role: Leader [29:1293:3059] 2026-01-10T12:14:55.971882Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037890] send queued [29:1293:3059] 2026-01-10T12:14:55.971918Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1293:3059] 2026-01-10T12:14:55.972682Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [29:1291:3056], Recipient [29:1248:3028]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-10T12:14:55.972815Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-10T12:14:55.972880Z node 29 :TABLET_EXECUTOR DEBUG: 
Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:14:55.972988Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-10T12:14:55.973061Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-10T12:14:55.973157Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:14:55.973191Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-10T12:14:55.973225Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-10T12:14:55.973265Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-10T12:14:55.973328Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-10T12:14:55.973386Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:14:55.973420Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-10T12:14:55.973451Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-10T12:14:55.973487Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-10T12:14:55.973614Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-10T12:14:55.973872Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-10T12:14:55.973932Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[29:1291:3056], 2} after executionsCount# 1 2026-01-10T12:14:55.973983Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[29:1291:3056], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:14:55.974062Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[29:1291:3056], 2} finished in read 2026-01-10T12:14:55.974124Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:14:55.974161Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-01-10T12:14:55.974194Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:14:55.974229Z node 29 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:14:55.974284Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:14:55.974315Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:14:55.974353Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-10T12:14:55.974391Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-10T12:14:55.974509Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:14:55.974571Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:14:55.974618Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-10T12:14:55.977570Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037890] send [29:1293:3059] 2026-01-10T12:14:55.977623Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1293:3059] 2026-01-10T12:14:55.977816Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1291:3056], Recipient [29:1248:3028]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-10T12:14:55.977880Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> 
NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] Test command err: 2026-01-10T12:14:41.803705Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705249943923314:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:41.818571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:41.899126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:42.360803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:42.360939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:42.378061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:42.730969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705249943923277:2081] 1768047281798785 != 1768047281798788 2026-01-10T12:14:42.731067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:42.765996Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:42.839384Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:42.958403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:42.990813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:43.004163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:44.043966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:46.820009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705249943923314:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:46.828304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:47.662003Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7593705275713728082:2604], ActorId: [1:7593705275713728083:2604], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=MzZlOTk0OTktOTc2ZDk4ODctYWZkODRmZjgtYTE2YTkxMzA=, TxId: 01kekx97am8aqfed7y1056pdcr 2026-01-10T12:14:47.677454Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_SESSION, issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=MzZlOTk0OTktOTc2ZDk4ODctYWZkODRmZjgtYTE2YTkxMzA= } 2026-01-10T12:14:47.701941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705275713728107:2328], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:47.702036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:47.702514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705275713728116:2329], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:47.702554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:48.156566Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:48.178909Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:48.535339Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:48.628270Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705280619841150:2081] 1768047288025773 != 1768047288025776 2026-01-10T12:14:48.646787Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:48.663387Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:48.663468Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:48.751862Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:48.906642Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:49.130569Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:50.967775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:50.978432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:52.004997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> Channels20::LocalChannelBackPressure [GOOD] >> Channels20::LocalChannelAsyncRead |89.3%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] Test command err: Trying to start YDB, gRPC: 5690, MsgBus: 63933 2026-01-10T12:14:26.651819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705186223159491:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:26.651957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:26.978839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:27.027991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:27.028178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:27.110142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:27.128580Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:27.129279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705186223159383:2081] 1768047266647920 != 1768047266647923 2026-01-10T12:14:27.272173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:27.408803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:27.408831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:27.408838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:27.408912Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:27.670492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:27.994750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:14:27.995138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:14:27.997300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:27.998109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:14:28.000927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047268043, transactions count in step: 1, at schemeshard: 
72057594046644480 2026-01-10T12:14:28.002536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:14:28.002593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-10T12:14:28.002890Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705186223159915:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:14:28.003110Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705186223159354:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:28.003235Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705186223159357:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:28.003413Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705186223159831:2204][/Root] Path was updated to new version: owner# [1:7593705186223159653:2109], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:28.003419Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705190518127290:2289][/Root] Path was updated to new version: owner# [1:7593705190518127284:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:28.003648Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705190518127289:2288][/Root] Path was updated to new version: owner# [1:7593705190518127283:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:28.004108Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705186223159915:2251] Ack update: ack to# [1:7593705186223159742:2151], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:14:28.004861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:14:28.005453Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705186223159351:2049] Update description: 
path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false Trying to start YDB, gRPC: 24472, MsgBus: 11056 2026-01-10T12:14:31.111564Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:31.132197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:31.241313Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:31.246676Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705210529174131:2081] 1768047271044417 != 1768047271044420 2026-01-10T12:14:31.272433Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:31.272548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:31.336581Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:31.353457Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:31.589665Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:31.589685Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:31.589694Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:31.589761Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:32.088293Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:32.505604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:14:32.505904Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:14:32.505964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:32.506530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:14:32.509800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047272558, transactions count in step: 1, at schemeshard: 72057594046644480 
2026-01-10T12:14:32.510863Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:14:32.510913Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-10T12:14:32.511160Z node 2 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [2:7593705210529174657:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:14:32.511311Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7593705210529174102:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:32.511405Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7593705210529174105:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:32.511550Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:7593705210529174417:2125][/Root] Path was updated to new vers ... t execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:46.250192Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:46.493884Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:46.493914Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:46.493924Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:46.494014Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:46.720206Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:47.617843Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:47.790533Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:47.888826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:48.119638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:48.305367Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:50.700417Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593705269842940671:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:50.700508Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:52.525596Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593705299907713612:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.525712Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.526019Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593705299907713621:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.526066Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:53.648451Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:53.719773Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:53.806368Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:53.866973Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:53.975736Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:54.076730Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:54.363428Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:54.503696Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:54.752921Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593705308497649124:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:54.753004Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:54.753551Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593705308497649129:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:54.753594Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593705308497649130:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:54.753623Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:54.759553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:54.795223Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593705308497649133:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:14:54.878484Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593705308497649186:3783] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:58.449024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-10T12:14:58.468527Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-10T12:14:58.720109Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593705325677518769:2570], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2026-01-10T12:14:58.722950Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=OThhNzc4YjEtMzY5NDg2YWUtODkwZDkxNjMtZjE4ZTM5MWM=, ActorId: [4:7593705325677518767:2569], ActorState: ExecuteState, LegacyTraceId: 01kekx9hzd4212zmgadwawaymp, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 17 } message: "Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn\'t exist, please contact internal support" end_position { row: 2 column: 17 } severity: 1 } } tx_id# trace_id# |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpScripting::StreamExecuteYqlScriptScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22827, MsgBus: 31974 2026-01-10T12:13:46.684467Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705015117664692:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:46.685366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:47.272239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:47.272373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:47.348033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:47.363681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:47.373764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:47.375610Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705015117664629:2081] 1768047226622016 != 1768047226622019 2026-01-10T12:13:47.574279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:47.591344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:47.591373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:47.591380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:47.591465Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:47.694221Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:48.390273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:48.397772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:48.400311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:48.401106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:48.405636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047228451, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:48.411926Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705019412632463:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-10T12:13:48.411958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-10T12:13:48.412001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-10T12:13:48.412216Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705015117664600:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.412216Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705015117664597:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.412394Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705015117664603:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:48.412592Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705019412632463:2251] Ack update: ack to# [1:7593705015117664994:2150], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:48.412594Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019412632393:2211][/Root] Path was updated to new version: owner# [1:7593705015117664916:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.412794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-10T12:13:48.413435Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019412632571:2288][/Root] Path was updated to new version: owner# [1:7593705019412632564:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:48.413716Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705019412632572:2289][/Root] Path was updated to new version: owner# [1:7593705019412632565:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:51.279238Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:51.279952Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010df/r3tmp/spilling-tmp-runner/node_1_fbda607c-de7807e0-ece4d687-d136fee4, actor: [1:7593705036592501961:2307] 2026-01-10T12:13:51.280671Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010df/r3tmp/spilling-tmp-runner 2026-01-10T12:13:51.300277Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705036592501977:2497][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705015117664916:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:51.300345Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7dh0bwdy9r9avfsnms6h", Request has 18444976026478.251333s seconds to be completed 2026-01-10T12:13:51.300523Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705036592501982:2498][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705015117664916:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:13:51.305337530 229166 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-10T12:13:51.305211Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705036592501983:2499][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705015117664916:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:13:51.305552108 229166 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.305749Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx7dh0bwdy9r9avfsnms6h", Created new session, sessionId: 
ydb://session/3?node_id=1&id=ZTk0NTY4NzAtZTRhNGJmMmQtYjU1YTU4NC1hYzVkOTBmMw==, workerId: [1:7593705036592502006:2328], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:13:51.306042Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7dh0bwdy9r9avfsnms6h 2026-01-10T12:13:51.306113Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:51.306138Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:13:51.306178Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0110 12:13:51.307984619 229166 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.308174917 229166 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.309716Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:51.320943162 229166 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.321084546 229166 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.322456Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:51.335801767 229166 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:51.335919419 229166 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:51.337287Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:13:51.349172Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7gbmfchatkqznvrdnfzv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk0NTY4NzAtZTRhNGJmMmQtYjU1YTU4NC1hYzVkOTBmMw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705 ... ROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL 2026-01-10T12:14:58.492026Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705322792850944:2351] TxId: 281474976710662. Ctx: { TraceId: 01kekx9h6v8epcthc0mjm9j97h, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlY2MzOTAtZThhYWNjNWYtMWExNzQ0ODQtOTA2NGVhZmY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:14:58.504614Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9h6v8epcthc0mjm9j97h, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlY2MzOTAtZThhYWNjNWYtMWExNzQ0ODQtOTA2NGVhZmY=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7593705322792850948:2366] 2026-01-10T12:14:58.505187Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9h6v8epcthc0mjm9j97h, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2VlY2MzOTAtZThhYWNjNWYtMWExNzQ0ODQtOTA2NGVhZmY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705322792850949:2367] 2026-01-10T12:14:58.507498Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9h6v8epcthc0mjm9j97h", Forwarded response to sender actor, requestId: 4, sender: [9:7593705318497883544:2350], selfId: [9:7593705266958275152:2179], source: [9:7593705318497883545:2351] 2026-01-10T12:14:58.510267Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Y2VlY2MzOTAtZThhYWNjNWYtMWExNzQ0ODQtOTA2NGVhZmY=, workerId: [9:7593705318497883545:2351], local sessions count: 0 E0110 12:14:58.737303766 250385 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:58.737521233 250385 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:58.768854Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |89.3%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::UpdatePk |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> KqpYql::BinaryJsonOffsetBound >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin >> KqpScripting::StreamScanQuery >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpScripting::EndOfQueryCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 15342, MsgBus: 26584 2026-01-10T12:14:56.233532Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705316641239542:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:56.233583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:56.967479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:56.974640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:56.974780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:57.275711Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:57.300242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:57.361605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:57.368271Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705316641239522:2081] 1768047296212132 != 1768047296212135 2026-01-10T12:14:57.390403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-10T12:14:57.598306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:57.598329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:57.598337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:57.598454Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:57.803227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:58.376449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:58.409432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:01.236283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705316641239542:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:01.236362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:01.573288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705338116076903:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:01.573426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:01.573921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705338116076913:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:01.573966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.300499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:02.585029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705342411044309:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.585187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.585763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705342411044314:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.585827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705342411044315:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.586140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:02.591115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:02.625071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705342411044318:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:15:02.820503Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705342411044369:2605] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] Test command err: 2026-01-10T12:14:44.992795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705263091199962:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:44.993017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:44.999230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:45.564172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:45.635762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:45.636099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:45.784866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:45.822296Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705263091199727:2081] 1768047284921228 != 1768047284921231 2026-01-10T12:14:45.829636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:45.849186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:45.924243Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:46.172503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:46.189052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:47.216955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:47.453132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:49.969002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705263091199962:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:49.969075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:51.849890Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705295771149822:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:51.892815Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:51.984588Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:52.362541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:52.425531Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:52.432001Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705295771149693:2081] 1768047291781815 != 1768047291781818 2026-01-10T12:14:52.439050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:52.439162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:52.470683Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:52.785985Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:52.904200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:53.986878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:53.991716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:55.003696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.849784Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705295771149822:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:56.849873Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:58.236322Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7593705325835921797:2609], ActorId: [2:7593705325835921798:2609], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=OTA0MjJkY2UtMTIxOGYwOTYtNDRjMzg0NWUtNWMwNzUzNmE=, TxId: 01kekx9hnjc6wqd4j6gss00mga 2026-01-10T12:14:58.240115Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705325835921820:2330], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:58.240199Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:58.241007Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705325835921831:2331], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:58.241069Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |89.3%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] Test command err: 2026-01-10T12:14:46.592894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705272447112387:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:46.593034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:47.399030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:47.399149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:47.459285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:47.614369Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:47.684981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:47.695089Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705272447112350:2081] 1768047286568735 != 1768047286568738 2026-01-10T12:14:47.743371Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:47.847792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:48.328930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:49.395341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:51.596020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705272447112387:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:51.596129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:53.787556Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] 
OwnerId: [1:7593705302511884463:2608], ActorId: [1:7593705302511884464:2608], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=Y2YxOTAzMDAtZjYyNjRlYjktODc3MTJiZmUtODgxODYzNzI=, TxId: 01kekx9dace35tbqb7pny2whn4 2026-01-10T12:14:53.792892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705302511884487:2332], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:53.793324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:53.800286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705302511884500:2333], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:53.800376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |89.3%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] Test command err: 2026-01-10T12:14:44.056105Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705263003324270:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:44.056420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:44.587775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:44.587870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:44.704862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:44.751216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:44.838563Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:44.840257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705258708356851:2081] 1768047284012451 != 1768047284012454 2026-01-10T12:14:44.928211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:45.051624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:45.274478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:45.305087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:46.380828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:49.048299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705263003324270:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:49.048385Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:52.441202Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7593705297363063566:2617], ActorId: [1:7593705297363063567:2617], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=OGEyMjAyMi03OGVlYmMxNS0zOTZkNDU3NS1hMGE5ZmU0OQ==, TxId: 01kekx9c0369k7p2f9z51hjkgh 2026-01-10T12:14:52.445158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705297363063588:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.445246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.446518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705297363063599:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.446639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:52.738270Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705300717948566:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:52.738312Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:52.823404Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:53.103007Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:14:53.107009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:53.107099Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:53.167213Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705300717948542:2081] 1768047292711726 != 1768047292711729 2026-01-10T12:14:53.219567Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:53.232798Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:53.395570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:14:53.710401Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:54.385670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:54.397976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:14:55.415624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:57.711586Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7593705322192786044:2599], ActorId: [2:7593705322192786045:2599], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=YmRmMzMzYWItYzQ0NDhjYS1hZDUzMWI1MS02ZDMyY2VjZg==, TxId: 01kekx9h51atrzb6twt9sdzyhj 2026-01-10T12:14:57.720384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705322192786066:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.720526Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.728594Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705322192786079:2328], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.729246Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.740314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705300717948566:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:57.740389Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |89.3%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> KqpYql::TableUseBeforeCreate >> KqpYql::InsertCVList-useSink >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |89.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |89.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |89.4%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin >> TBoardSubscriber2DCTest::ReconnectReplica >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 62952, MsgBus: 19410 2026-01-10T12:13:53.828299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705044168107570:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:53.828363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:54.110145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:54.134564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:54.134875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:54.214315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:54.272200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705044168107324:2081] 1768047233789082 != 1768047233789085 2026-01-10T12:13:54.276554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:54.301560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:54.434239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:54.434267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:54.434282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:54.434396Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:54.827488Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:55.126058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:55.126499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:55.130126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:55.130915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:55.139957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047235178, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:55.148808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-10T12:13:55.148878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2026-01-10T12:13:55.149299Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705048463075149:2245] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-10T12:13:55.149587Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705044168107292:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:55.149717Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705044168107295:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:55.149794Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705044168107298:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:55.149960Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705048463075270:2289][/Root] Path was updated to new version: owner# [1:7593705048463075258:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:55.150246Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705048463075269:2288][/Root] Path was updated to new version: owner# [1:7593705048463075257:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: 
[OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:55.157246Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705048463075059:2199][/Root] Path was updated to new version: owner# [1:7593705044168107612:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:55.159298Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705048463075149:2245] Ack update: ack to# [1:7593705044168107677:2147], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:55.160025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2026-01-10T12:13:55.160734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:13:58.216108Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:58.217897Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010dd/r3tmp/spilling-tmp-runner/node_1_e14cbd15-6a3c5ee9-980a438d-86a1810d, actor: [1:7593705065642944657:2307] 2026-01-10T12:13:58.218032Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010dd/r3tmp/spilling-tmp-runner E0110 12:13:58.234098904 231935 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.234307555 231935 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:13:58.241452402 231935 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.241668875 231935 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.236165Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7m3s2fv5r0tj6ztxs6kf", Request has 18444976026471.315510s seconds to be completed 2026-01-10T12:13:58.263029Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705065642944682:2502][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705044168107612:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.263317Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705065642944680:2500][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705044168107612:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.266999Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: 
"01kekx7m3s2fv5r0tj6ztxs6kf", Created new session, sessionId: ydb://session/3?node_id=1&id=NGVhZjA5NzctMzMzMTFjOWEtZmVkODY4OWUtZjk5NTFjYWM=, workerId: [1:7593705065642944700:2329], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:13:58.272806Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7m3s2fv5r0tj6ztxs6kf 2026-01-10T12:13:58.272979Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:58.273008Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:13:58.273038Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:58.281988Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705065642944681:2501][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705044168107612:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.282755Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:58.292960006 231935 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.293121705 231935 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.294710Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:58.315543679 231934 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.315657946 231934 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.317260Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7q5cesjege8c3rkhy2rc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGVhZjA5NzctMzMzMTFjOWEtZmVkODY4OWUtZjk5NTFjYWM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705065642944700:2329] 2026- ... credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-10T12:15:05.304281Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705354326427989:2345] TxId: 281474976710662. Ctx: { TraceId: 01kekx9qx32mxd7zvwwwv17nw2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGEyNWIxYWYtNGQ4Y2RjZmEtNDE0NGY4NmYtOTRjZjFlMzE=, PoolId: default, IsStreamingQuery: 0}. 
Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:15:05.327143Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9qx32mxd7zvwwwv17nw2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGEyNWIxYWYtNGQ4Y2RjZmEtNDE0NGY4NmYtOTRjZjFlMzE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705354326427994:2361] 2026-01-10T12:15:05.327244Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01kekx9qx32mxd7zvwwwv17nw2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGEyNWIxYWYtNGQ4Y2RjZmEtNDE0NGY4NmYtOTRjZjFlMzE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705354326427993:2360] 2026-01-10T12:15:05.328345Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9qx32mxd7zvwwwv17nw2", Forwarded response to sender actor, requestId: 4, sender: [9:7593705350031460589:2344], selfId: [9:7593705315671721392:2182], source: [9:7593705350031460590:2345] 2026-01-10T12:15:05.329588Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NGEyNWIxYWYtNGQ4Y2RjZmEtNDE0NGY4NmYtOTRjZjFlMzE=, workerId: [9:7593705350031460590:2345], local sessions count: 0 E0110 12:15:05.341289317 253522 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:05.341494416 253522 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:05.344764Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } >> TBoardSubscriberTest::SimpleSubscriber >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::DropByDisconnect >> KqpYql::NonStrictDml [GOOD] >> KqpYql::PgIntPrimaryKey |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> Channels20::IcChannelLateBinding [GOOD] >> Channels20::IcChannelAsyncRead >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 5518, MsgBus: 23632 2026-01-10T12:13:54.389479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705052082865920:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:54.390528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:54.826596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:54.826692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:54.947951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:54.979437Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:54.983429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705052082865882:2081] 1768047234340409 != 1768047234340412 2026-01-10T12:13:55.028639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:55.269065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:55.281596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:55.281622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:55.281628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:55.281703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:55.391051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:56.177125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:56.177491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:56.179160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:56.179836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:56.189865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047236235, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:56.190797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:13:56.190840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-10T12:13:56.191010Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705052082866414:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:13:56.191144Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705052082865856:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], 
deletion# false 2026-01-10T12:13:56.191329Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705052082866337:2206][/Root] Path was updated to new version: owner# [1:7593705052082866153:2108], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.191725Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705052082865850:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:56.191806Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705052082865853:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:56.192569Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705056377833826:2289][/Root] Path was updated to new version: owner# [1:7593705056377833820:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.192762Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705056377833825:2288][/Root] Path was updated to new version: owner# [1:7593705056377833819:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.201435Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705052082866414:2248] Ack update: ack to# [1:7593705052082866242:2149], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:56.201670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:13:59.018475Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:59.020513Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705073557703220:2501][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705052082866153:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:59.021554Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010da/r3tmp/spilling-tmp-runner/node_1_f5d8a3ca-954abeb5-6495f567-1a01f060, actor: [1:7593705073557703228:2309] 2026-01-10T12:13:59.021774Z node 1 
:KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010da/r3tmp/spilling-tmp-runner 2026-01-10T12:13:59.022905Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7n3z1b302c8nf2j2vksd", Request has 18444976026470.528742s seconds to be completed 2026-01-10T12:13:59.026760Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx7n3z1b302c8nf2j2vksd", Created new session, sessionId: ydb://session/3?node_id=1&id=YmRiYWUyMTAtMzE1MDA2NjEtYjRhYWI5MzMtZDEzMzU5NTE=, workerId: [1:7593705073557703247:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:13:59.027005Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7n3z1b302c8nf2j2vksd 2026-01-10T12:13:59.027056Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:59.027087Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:13:59.027522Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:59.033196Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705073557703249:2503][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705052082866153:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:59.034129Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705073557703250:2504][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705052082866153:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:13:59.034963485 232180 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:59.035117719 232180 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:13:59.044943607 232180 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:59.045089424 232180 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:59.048937Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:59.056597497 232180 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:59.056796158 232180 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:59.058432Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:59.076536223 232179 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:59.076651729 232179 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:59.081260Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:59.112942375 232180 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:59.113062394 232180 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:59.109471Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7qy4ewhpwzjc4b3q7zcs, Database: /Root, SessionId: ydb://session/3?no ... atusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0110 12:15:07.728953815 254349 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:07.729138223 254349 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:07.731039Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-10T12:15:08.127954Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705368718471558:2342] TxId: 281474976710662. Ctx: { TraceId: 01kekx9tkv9kgcew3j8ar00nfy, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDAyMWQ2MDctNDZhZDJlZTEtZmU3N2QwYjQtODU4M2M1NDA=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:15:08.133557Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9tkv9kgcew3j8ar00nfy, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDAyMWQ2MDctNDZhZDJlZTEtZmU3N2QwYjQtODU4M2M1NDA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705368718471563:2356] 2026-01-10T12:15:08.134795Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9tkv9kgcew3j8ar00nfy, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NDAyMWQ2MDctNDZhZDJlZTEtZmU3N2QwYjQtODU4M2M1NDA=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7593705368718471564:2357] 2026-01-10T12:15:08.135725Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9tkv9kgcew3j8ar00nfy", Forwarded response to sender actor, requestId: 4, sender: [9:7593705364423504160:2341], selfId: [9:7593705325768797698:2211], source: [9:7593705364423504162:2342] 2026-01-10T12:15:08.137371Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NDAyMWQ2MDctNDZhZDJlZTEtZmU3N2QwYjQtODU4M2M1NDA=, workerId: [9:7593705364423504162:2342], local sessions count: 0 E0110 12:15:08.315085220 254349 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:08.315296764 254349 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:08.412967Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> Channels20::LocalChannelAsyncRead [GOOD] >> Channels20::IcChannelTrivial >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown >> TBoardSubscriber2DCTest::NotAvailableByShutdown >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin >> TBoardSubscriberTest::ManySubscribersManyPublisher |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> KqpYql::UpdatePk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15203, MsgBus: 27455 2026-01-10T12:13:54.456527Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705050305901508:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:54.456597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:54.491015Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.008405s 2026-01-10T12:13:54.879372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:54.879496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:54.894554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:54.927190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-10T12:13:54.956260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705050305901481:2081] 1768047234452544 != 1768047234452547 2026-01-10T12:13:54.956949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:55.105924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:55.112673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:55.112700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:55.112725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:55.112858Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:55.475388Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:56.021540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:13:56.021972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:13:56.024036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:56.040733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:13:56.043876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047236088, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:13:56.045627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:13:56.045695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-10T12:13:56.045908Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705050305902006:2246] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:13:56.046358Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705050305901449:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:56.046503Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705050305901452:2052] Update description: path# /Root, pathId# [OwnerId: 
72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:56.046541Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705050305901455:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:13:56.046768Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705050305901956:2216][/Root] Path was updated to new version: owner# [1:7593705050305901770:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.047095Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705050305902006:2246] Ack update: ack to# [1:7593705050305901842:2150], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:13:56.047223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-10T12:13:56.048305Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705050305902080:2288][/Root] Path was updated to new version: owner# [1:7593705050305902074:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.048584Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705050305902081:2289][/Root] Path was updated to new version: owner# [1:7593705050305902075:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:56.056498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:58.518473Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:13:58.521071Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705067485771516:2499][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705050305901770:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.524068Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010db/r3tmp/spilling-tmp-runner/node_1_b561dd07-deb169c-d508ad4f-71807c21, actor: [1:7593705067485771524:2309] 2026-01-10T12:13:58.524342Z 
node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010db/r3tmp/spilling-tmp-runner 2026-01-10T12:13:58.530049Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx7mzr2a2gwqzxytwqg522", Request has 18444976026471.021614s seconds to be completed 2026-01-10T12:13:58.532445Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705067485771544:2501][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705050305901770:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.537748Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx7mzr2a2gwqzxytwqg522", Created new session, sessionId: ydb://session/3?node_id=1&id=ZmY5ZDhlNjEtN2ViNWZhYjYtZjQzYTcyODUtYzc5MTZmMDQ=, workerId: [1:7593705067485771558:2328], database: /Root, longSession: 1, local sessions count: 1 E0110 12:13:58.539294591 232300 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.539466081 232300 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.538375Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705067485771545:2502][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705050305901770:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:13:58.538392Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx7mzr2a2gwqzxytwqg522 2026-01-10T12:13:58.538467Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:13:58.538497Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:13:58.538531Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0110 12:13:58.550402804 232300 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.550616413 232300 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.552159Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:58.558893824 232300 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.559045191 232300 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.560157Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:13:58.575495728 232299 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:13:58.575637457 232299 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:13:58.578326Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:13:58.599088Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx7 ... pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0110 12:15:10.760774484 254854 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:10.761004074 254854 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:10.768456Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-10T12:15:10.940961Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705375376777200:2350] TxId: 281474976710663. Ctx: { TraceId: 01kekx9xh21q96z4kq74wcrzkc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGY1NjcwYy03MmFlY2RiNC1hOTU2MmY1NC0yYWJjZTc2NA==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:15:10.956641Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01kekx9xh21q96z4kq74wcrzkc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGY1NjcwYy03MmFlY2RiNC1hOTU2MmY1NC0yYWJjZTc2NA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705375376777204:2365] 2026-01-10T12:15:10.956809Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01kekx9xh21q96z4kq74wcrzkc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGY1NjcwYy03MmFlY2RiNC1hOTU2MmY1NC0yYWJjZTc2NA==, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7593705375376777205:2366] 2026-01-10T12:15:10.958593Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekx9xh21q96z4kq74wcrzkc", Forwarded response to sender actor, requestId: 4, sender: [9:7593705375376777094:2349], selfId: [9:7593705332427103373:2265], source: [9:7593705375376777095:2350] 2026-01-10T12:15:10.960028Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=OGY1NjcwYy03MmFlY2RiNC1hOTU2MmY1NC0yYWJjZTc2NA==, workerId: [9:7593705375376777095:2350], local sessions count: 0 >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> TBoardSubscriberTest::ReconnectReplica >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 15226, MsgBus: 13452 2026-01-10T12:15:04.305150Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705350158729306:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:04.305210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:15:04.408989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:05.022255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:05.022363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:05.055943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:05.068213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705350158729253:2081] 1768047304290916 != 1768047304290919 2026-01-10T12:15:05.082016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:05.086995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:05.320441Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:05.340208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:05.356706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:05.356727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:05.356733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:05.356838Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:06.190442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:06.226038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:06.316392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.632263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.916400Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.011929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:09.308532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705350158729306:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:09.308634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:10.126189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705375928534915:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.126318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.140958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705375928534925:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.141056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.532339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.564516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.598295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.635240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.726856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.816940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.862080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.948867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.064946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380223503098:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.065039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.065889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380223503104:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.065938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.066031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380223503103:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.070548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:11.085118Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705380223503107:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:11.176324Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705380223503158:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
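[Annotation, not part of the captured log] The repeated "Resource pool default not found or you don't have access permissions" warnings above appear to be bootstrap noise from KQP's workload manager in a fresh test database: the first queries try to fetch the built-in "default" resource pool before it has been created, a TPoolCreatorActor then creates it (the ESchemeOpCreateResourcePool operation), and the subsequent TX_PROXY message "path exist, request accepts it" shows a duplicate create being accepted idempotently once the pool path exists. For reference, a resource pool can also be created explicitly in YQL; the sketch below is written from memory as an illustration only, and the statement options (CONCURRENT_QUERY_LIMIT, QUEUE_SIZE) are assumptions to verify against the YDB documentation, not anything taken from this log.

    -- Hypothetical sketch: explicitly creating a workload-manager resource pool.
    -- Option names below are assumed from memory; verify against the YDB docs.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on queries executing in this pool at once
        QUEUE_SIZE = 100              -- how many queries may wait once the limit is reached
    );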
: Error: Type annotation, code: 1030
:3:20: Warning: At lambda, At function: AsStruct, At tuple
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> KqpYql::EvaluateExprYsonAndType >> YdbProxy::CreateTopic >> KqpYql::TableUseBeforeCreate [GOOD] |89.5%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> PartitionEndWatcher::EmptyPartition |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |89.5%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> KqpYql::InsertCVList-useSink [GOOD] >> KqpYql::InsertIgnore >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 3884, MsgBus: 21293 2026-01-10T12:15:00.596661Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705333998898836:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:00.598047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:01.453199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:01.487321Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:01.487543Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705333998898731:2081] 1768047300534284 != 1768047300534287 2026-01-10T12:15:01.515325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:01.515458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:01.561415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:01.604093Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
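[Annotation, not part of the captured log] The YQL type-annotation diagnostics above (ending in ":5:27: Error: Cannot update primary key column: Group") are the expected rejection of an UPDATE that targets a primary-key column, and the ":4:31" warning about an implicit Optional/Int32 bitcast is consistent with an expression of the form column + integer literal. Below is a minimal YQL sketch of the kind of statement that triggers this class of error; the table path and column names are assumptions chosen for illustration, not taken from the test source.

    -- Hypothetical sketch: an UPDATE that YDB rejects because `Group` is part of the primary key.
    -- Assumes a table /Root/Test with primary-key column Group and a data column Name.
    UPDATE `/Root/Test`
    SET `Group` = `Group` + 1    -- rejected with "Cannot update primary key column: Group";
                                 -- the + 1 is also a likely source of the implicit-bitcast warning
    WHERE Name = "Anna";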
2026-01-10T12:15:01.696248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:01.965083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:01.965109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:01.965116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:01.965206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:03.188566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:03.346083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:03.772384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:03.973860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:04.068489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.580351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705333998898836:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:05.580523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:06.350333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705359768704394:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.350480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.350832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705359768704404:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.350898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.701881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.733959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.771158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.804594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.840376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.881545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.924754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.995442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.160125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705364063672573:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.160214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.160706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705364063672578:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.160758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705364063672579:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.160788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.165496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:07.179715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:15:07.184893Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705364063672582:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:07.258447Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705364063672633:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19326, MsgBus: 4369 2026-01-10T12:15:10.368542Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705378392579090:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:10.368694Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:10.398118Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:10.501379Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:10.503087Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705378392579032:2081] 1768047310357905 != 1768047310357908 2026-01-10T12:15:10.530072Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:10.530159Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:10.551728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:10.576306Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:10.844724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:10.844748Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:10.844756Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:10.844858Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:11.384247Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:11.851744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:11.859323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:14.611309Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449100:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.612179Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.614039Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449118:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.614489Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.614874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449121:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.614933Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.625752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.680414Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449204:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.680538Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.684401Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449210:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.684724Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705395572449209:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.684780Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.689226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:14.700038Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705395572449213:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:15:14.785651Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705395572449264:2592] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:15.376199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705378392579090:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:15.376277Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> YdbProxy::ReadTopic >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 62757, MsgBus: 31429 2026-01-10T12:15:07.283869Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705363120091250:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:07.285173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:07.925391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:07.925524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:07.927712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:08.042534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:08.247354Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:08.252372Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705363120091216:2081] 1768047307243498 != 1768047307243501 2026-01-10T12:15:08.292072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:08.309196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:08.488709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-10T12:15:08.488737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:08.488744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:08.488820Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:09.499359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:09.511564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:09.613802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:09.883438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.078893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.194161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.250429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705363120091250:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:12.250515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:12.882294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705384594929578:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.882425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.883030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705384594929588:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.883146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.416738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.478561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.525130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.561153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.600473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.662226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.726397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.807675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.918224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705388889897766:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.918323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.918665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705388889897771:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.918737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705388889897772:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.918901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.923427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:13.943586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705388889897775:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:14.046116Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705393184865122:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> YdbProxy::DescribePath >> AsyncIndexChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow >> KqpScripting::ExecuteYqlScriptPg [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |89.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 11909, MsgBus: 5447 2026-01-10T12:15:03.634399Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705346729674485:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:03.634439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:04.160332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:04.238064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:04.238183Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:04.274527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:04.398779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:04.420200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705346729674461:2081] 1768047303627550 != 1768047303627553 2026-01-10T12:15:04.438059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:04.521131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:04.521156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:04.521163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:04.521280Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:04.740871Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:05.297944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:05.304687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:05.353578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.527324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.702034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.864296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.015836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705368204512817:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.015928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.017927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705368204512827:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.018031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.369280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.415647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.461796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.516907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.562600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.636379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705346729674485:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:08.636451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:08.648647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.698543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.776600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.893327Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705368204513708:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.893420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.893925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705368204513713:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.893967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705368204513714:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.894027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:08.897891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:08.913184Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705368204513717:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:08.987139Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705368204513770:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:12.109938Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047312115, ... , got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:13.616397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:13.635305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:13.635396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:13.644366Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705389193469067:2081] 1768047313291950 != 1768047313291953 2026-01-10T12:15:13.645736Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:13.661177Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:13.837551Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:13.894987Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:13.895016Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:13.895024Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:13.895099Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:14.399611Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:14.527566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:14.534881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:14.544915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.624892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.823389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.911679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.639855Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705406373340120:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.639978Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.640536Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705406373340130:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.640608Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.765367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.811794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.882971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.968002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.018983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.080543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.150877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.237496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.334065Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705389193469193:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:18.334201Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:18.413594Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705410668308303:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.413716Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.414217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705410668308308:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.414266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705410668308309:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.414373Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.419126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:18.441098Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705410668308312:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:18.500817Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705410668308363:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:21.576869Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047321579, txId: 281474976715673] shutting down 2026-01-10T12:15:22.096137Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047322118, txId: 281474976715675] shutting down 2026-01-10T12:15:22.519667Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047322538, txId: 281474976715677] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-01-10T12:12:37.692194Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1768047157692161 2026-01-10T12:12:38.160211Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704724280823270:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:38.160301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:38.271362Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.350734Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704722259686721:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:38.350810Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:38.415340Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:38.415885Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:38.816954Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.834582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:38.883512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.883670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-10T12:12:38.919001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:38.919127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:38.979824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:38.991347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:39.012026Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:39.012709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:39.060029Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:39.072999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:39.233589Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:39.292911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0029f3/r3tmp/yandexoeuOrW.tmp 2026-01-10T12:12:39.292939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0029f3/r3tmp/yandexoeuOrW.tmp 2026-01-10T12:12:39.293115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0029f3/r3tmp/yandexoeuOrW.tmp 2026-01-10T12:12:39.293204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:39.327375Z INFO: TTestServer started on Port 20385 GrpcPort 28066 2026-01-10T12:12:39.332241Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:28066 2026-01-10T12:12:39.599169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:43.042161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745755661053:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.042346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.042950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745755661081:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.047386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:43.057466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704745755661115:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.058064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:43.096543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704745755661083:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-10T12:12:43.160200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704724280823270:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.160290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:43.340186Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593704722259686721:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:43.340268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:43.496668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:43.497606Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704745755661173:2976] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:43.506002Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593704743734523340:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:43.508656Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjMyM2RhN2UtOTVlNmU1YjMtYWE1YmNmNjUtNTM0ZGIyNzk=, ActorId: [2:7593704743734523303:2302], ActorState: ExecuteState, LegacyTraceId: 01kekx5dny29m3c2641534bbtd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:12:43.511229Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:12:43.535865Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593704745755661243:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:12:43.536447Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjQ5NDMzZjItZGZmYmU3OTMtNWMxMDFlZjgtZjJiNGZjOGE=, ActorId: [1:7593704745755661050:2329], ActorState: ExecuteState, LegacyTraceId: 01kekx5dmx0nzx1ne76r6nwkv7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } mes ... 15.882612Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:15:15.882653Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:15:15.984261Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:15:15.984304Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:15:15.984323Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:15:15.984349Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:15:15.984373Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:15:16.083387Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:15:16.083428Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:15:16.083450Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:15:16.083478Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:15:16.083501Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:15:16.146556Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [15:7593705397032631109:2462] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-10T12:15:16.146614Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [15:7593705397032631109:2462] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2026-01-10T12:15:16.146637Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [15:7593705397032631109:2462] (SourceId=src, PreferedPartition=(NULL)) Start idle 2026-01-10T12:15:16.146674Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-10T12:15:16.149989Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [15:7593705401327598498:2462], now have 1 active actors on pipe 2026-01-10T12:15:16.152136Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:15:16.152176Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:15:16.152279Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-01-10T12:15:16.152327Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:15:16.152347Z node 16 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:15:16.152405Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:15:16.152422Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:15:16.152454Z node 16 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:15:16.152495Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:15:16.152515Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:15:16.152537Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:15:16.152588Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-10T12:15:16.152682Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:15:16.151115Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2026-01-10T12:15:16.155856Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-10T12:15:16.155902Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-10T12:15:16.156003Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:15:16.164402Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 2026-01-10T12:15:16.168768Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1768047316168 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:15:16.168933Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-10T12:15:16.169246Z :INFO: [] MessageGroupId [src] SessionId [src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0] Write session: close. Timeout = 0 ms 2026-01-10T12:15:16.169310Z :INFO: [] MessageGroupId [src] SessionId [src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0] Write session will now close 2026-01-10T12:15:16.169373Z :DEBUG: [] MessageGroupId [src] SessionId [src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0] Write session: aborting 2026-01-10T12:15:16.169911Z :INFO: [] MessageGroupId [src] SessionId [src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:15:16.169971Z :DEBUG: [] MessageGroupId [src] SessionId [src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0] Write session: destroy 2026-01-10T12:15:16.186204Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 grpc read done: success: 0 data: 2026-01-10T12:15:16.186238Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 grpc read failed 2026-01-10T12:15:16.186275Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 grpc closed 2026-01-10T12:15:16.186300Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|6676dc39-bab2df37-ec81e7cb-11c8dc53_0 is DEAD 2026-01-10T12:15:16.186826Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:15:16.263158Z :INFO: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Starting read session 2026-01-10T12:15:16.263221Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Starting cluster discovery 2026-01-10T12:15:16.263534Z :INFO: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Cluster 
discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23621: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23621
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23621. " 2026-01-10T12:15:16.263592Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Restart cluster discovery in 0.006916s 2026-01-10T12:15:16.271967Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Starting cluster discovery 2026-01-10T12:15:16.273431Z :INFO: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23621: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23621
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23621. " 2026-01-10T12:15:16.273522Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Restart cluster discovery in 0.014600s 2026-01-10T12:15:16.290449Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Starting cluster discovery 2026-01-10T12:15:16.290710Z :INFO: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23621: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23621
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23621. " 2026-01-10T12:15:16.290747Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Restart cluster discovery in 0.035498s 2026-01-10T12:15:16.328286Z :DEBUG: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Starting cluster discovery 2026-01-10T12:15:16.328720Z :NOTICE: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23621: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23621
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23621. " } 2026-01-10T12:15:16.328990Z :NOTICE: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23621: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23621
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23621. " } 2026-01-10T12:15:16.329174Z :INFO: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Closing read session. Close timeout: 0.000000s 2026-01-10T12:15:16.329321Z :NOTICE: [/Root] [/Root] [8d4fe143-7080333-d2f4a3fa-b79c6708] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin |89.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system >> YdbProxy::AlterTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 19906, MsgBus: 25055 2026-01-10T12:15:05.688652Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705354413135972:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:05.708970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:06.128189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:06.148970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:06.149429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:15:06.170723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:06.283260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:06.373633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:06.415906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:06.415932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:06.415941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:06.416041Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:06.732241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:06.952682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:06.964951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:07.045170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.236207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.495331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.648520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:09.878197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705371593006973:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:09.878326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:09.878800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705371593006982:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:09.878846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.316654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.371035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.433510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.516519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.581150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.647026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.689249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705354413135972:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:10.690087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:10.721621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.817401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.100650Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380182942453:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.100727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.101248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380182942458:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.101293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705380182942459:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.101579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.105809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:11.125768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:15:11.126255Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705380182942462:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:11.201981Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705380182942514:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:13.494416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchem ... 15.244653Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705395887257180:2180];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:15.249644Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:15.259631Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:15.324010Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:15.450931Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705395887257038:2081] 1768047315231923 != 1768047315231926 2026-01-10T12:15:15.457900Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:15.470450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:15.470532Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:15.475384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:15.660341Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:15.720778Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:15.720800Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:15.720809Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:15.720888Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:16.247126Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:16.350334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:16.357047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:16.375598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:16.495639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:16.641665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:16.736184Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.332770Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705413067128093:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:19.332869Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:19.333340Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705413067128103:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:19.333396Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:19.500883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.589344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.649742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.701257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.763240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.866457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.062798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.227793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.244376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705395887257180:2180];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:20.244443Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:20.457618Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705417362096272:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.457773Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.458305Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705417362096277:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.458355Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705417362096278:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.458462Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.463600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:20.497207Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705417362096281:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:20.593166Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705417362096333:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser >> Channels20::IcChannelAsyncRead [GOOD] >> Channels20::IcChannelEarlyFinish >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> KqpYql::AnsiIn [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> Channels20::IcChannelTrivial [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |89.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpYql::EvaluateFor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2026-01-10T12:15:19.839224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705416233981170:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:19.839292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:20.464167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:20.970412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:20.970514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:21.013696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:21.052891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:21.127516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:21.146413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:21.908821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-10T12:15:21.908856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:21.908865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:21.908968Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:22.766977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:24.840311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705416233981170:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:24.840413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:25.997879Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705442003785602:2512] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-10T12:15:26.043560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:26.216913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:15:26.263153Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705446298753017:2594] txid# 281474976715661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> CdcStreamChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin >> KqpYql::InsertIgnore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 27315, MsgBus: 17635 2026-01-10T12:15:05.071151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705356412428802:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:05.071338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:15:05.905739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:05.905863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:05.931591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:06.087113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:06.087214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:06.098937Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:06.104675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705356412428619:2081] 1768047305054468 != 1768047305054471 2026-01-10T12:15:06.342327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:06.352744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:06.352781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:06.352788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:06.352895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:07.400986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:07.555222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.906266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.181902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.301170Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.072209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705356412428802:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:10.072303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:11.036225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382182234280:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.036327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.040177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382182234290:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.040245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.406105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.454819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.485173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.515940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.551669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.616713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.686719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.766867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.869721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382182235163:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.869812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.870304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382182235168:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.870343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382182235169:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.870583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.874536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:11.890853Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705382182235172:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:11.993878Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705382182235223:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:15.397468Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047315384, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 25359, MsgBus: 62771 2026-01-10T12:15:16.911681Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705403221398507:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:16.911753Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:16.952486Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:17.172276Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:17.241476Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:17.241565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:17.248449Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705403221398295:2081] 1768047316892392 != 1768047316892395 2026-01-10T12:15:17.315724Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:17.336350Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:17.368190Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:17.748630Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:17.748652Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:17.748659Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:17.748729Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:17.908046Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:18.538054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:18.543349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:18.561609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.699841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.136262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.278271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.916216Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705403221398507:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:21.916301Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:23.568211Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705433286171242:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.568315Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.572223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705433286171252:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.572329Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.647146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.693453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.733168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.772975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.820617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.882989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.929218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.001854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.155602Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705437581139428:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.155701Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.156012Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705437581139433:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.156066Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705437581139434:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.156178Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.161389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:24.173955Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705437581139437:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:24.235142Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705437581139488:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 24913, MsgBus: 30649 2026-01-10T12:15:04.990824Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705351359490415:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:04.991253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:05.044090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:05.512971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:05.552646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:05.552752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:05.725600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:05.732451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:05.732929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705351359490312:2081] 1768047304925954 != 1768047304925957 2026-01-10T12:15:05.811451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:05.984574Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:06.132761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:06.132781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:06.132788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:06.132869Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:07.169703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:07.273086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.548113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.889287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.028687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:09.957099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705351359490415:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:09.957180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:10.737256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705377129295973:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.737416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.742984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705377129295983:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.743072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.188560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.260472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.338377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.396224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.433475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.493938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.586587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.673676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.794671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705381424264161:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.794774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.801928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705381424264166:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.802031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705381424264167:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.802307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.806544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:11.825421Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705381424264170:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:11.922195Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705381424264221:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:14.144181Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3404} SessionId: ydb://session/3?node_id ... 15.627125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:15.628118Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705396554935166:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:15.648249Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:15.834400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:15.843969Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:15.846370Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705396554934929:2081] 1768047315590589 != 1768047315590592 2026-01-10T12:15:15.873292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:15.873381Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:15.883843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:16.277065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:16.356678Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:16.356707Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:16.356715Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:16.356792Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:16.590182Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:17.531914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:17.538672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:17.547610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.662757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.861463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.967442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.616382Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705396554935166:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:20.616453Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:23.199733Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705430914675200:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.199802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.200200Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705430914675209:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.200240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.293344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.340236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.413438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.455975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.500501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.567637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.632752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.713718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:23.830747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705430914676099:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.830863Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.831150Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705430914676104:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.831223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705430914676105:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.831276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.837096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:23.874049Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705430914676108:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:23.936265Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705430914676159:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2026-01-10T12:15:18.006958Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705405275247687:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:18.007608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:18.423887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:18.424010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:18.479755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:18.512779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705405275247642:2081] 1768047317945175 != 1768047317945178 2026-01-10T12:15:18.542457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:18.545125Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:18.750434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:19.023475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:19.153823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:19.153846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:19.153854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:19.153947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:20.213759Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:20.232140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:20.323203Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705418160150371:2492] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2026-01-10T12:15:24.157913Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705436504545162:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:24.158065Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:24.200003Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:24.304234Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:24.304293Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:24.307384Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:24.317710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:24.473950Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:24.655245Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:24.655266Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:24.655273Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:24.655358Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:24.937167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:24.944928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:25.168598Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
|89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> Cdc::KeysOnlyLog[YdsRunner] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertIgnore [GOOD] Test command err: Trying to start YDB, gRPC: 27779, MsgBus: 62172 2026-01-10T12:15:07.396347Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705361474538074:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:07.396423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:07.477916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:07.999796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:07.999898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:08.107202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:08.140977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:08.219079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:08.273363Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705361474537955:2081] 1768047307363370 != 1768047307363373 2026-01-10T12:15:08.399492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:08.484467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:08.484487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:08.484495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:08.484589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:08.500170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:09.602721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:09.615449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:09.733866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.143833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.378846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.510760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.400505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705361474538074:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:12.400574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:12.963700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382949376319:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.963840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.972289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382949376330:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.972400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:13.615997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.678536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.758295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.810614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.852096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.913733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:13.978227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.107815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:14.266543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705391539311814:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.266658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.267235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705391539311819:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.267292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705391539311820:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.267319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:14.272841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:14.295216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:15:14.295492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705391539311823:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:14.398392Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705391539311874:3787] txid# 281474976710671, ... 91374:2244];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:19.585543Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:19.664466Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:19.932805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:19.948870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:19.948952Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:19.972254Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705415829191168:2081] 1768047319554895 != 1768047319554898 2026-01-10T12:15:20.066621Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:20.103825Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:20.352183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:20.368099Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:20.368123Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:20.368131Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:20.368210Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:20.572265Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:21.443242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:21.453636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:21.463803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.627426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:22.166704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:22.520111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.576434Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705415829191374:2244];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:24.576497Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:25.272251Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705441598996816:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.272374Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.272843Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705441598996826:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.272895Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.406367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.454313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.517026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.571577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.613380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.669746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.725680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.797802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.893687Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705441598997697:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.893776Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.894109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705441598997703:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.894171Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705441598997702:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.894203Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.898979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:25.915059Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705441598997706:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:26.001939Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705441598997757:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. >> Cdc::UuidExchange[PqRunner] >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey >> Cdc::KeysOnlyLog[PqRunner] >> YdbProxy::DescribeTable [GOOD] >> AsyncIndexChangeCollector::DeleteNothing >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system >> KqpScan::ScanPg [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] Test command err: Trying to start YDB, gRPC: 26783, MsgBus: 9937 2026-01-10T12:14:48.494366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705282983869045:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:48.516495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:48.559187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:49.272302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:49.325194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:49.325307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:49.517310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:49.541672Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:49.765720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:49.792275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705282983868826:2081] 1768047288434313 != 1768047288434316 2026-01-10T12:14:49.844128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:49.908175Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.008101s 2026-01-10T12:14:50.107568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:50.107594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:50.107602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:50.107964Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:51.201055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:51.224646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:14:51.380479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:51.643398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:52.111795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:52.358344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:53.494010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705282983869045:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:53.494125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:55.479829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705313048641789:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:55.479995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:55.496920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705313048641799:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:55.497016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:55.497517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705313048641801:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:55.497570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:56.281685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.356528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.458214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.563043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.648765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.758905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.853676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:56.944148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:57.196575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705321638577280:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.196680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.196989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705321638577285:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.197021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.197065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705321638577286:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:57.201551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 7205 ... 4.970861Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:14.970954Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:14.996966Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:14.997063Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:15.017160Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:15.110812Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:15:15.116057Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:15.248222Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:15.285815Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:15.302212Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:15.357671Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:15.357696Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:15.357704Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:15.357784Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:15.532345Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:16.582247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:16.647876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:16.894072Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.133157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:17.390422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:19.160210Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593705394461388625:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:19.160301Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:19.252266Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593705393000270971:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:19.252350Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:20.462099Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705420231194835:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.462190Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.462721Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705420231194845:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.462787Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:20.688688Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.876947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.013360Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.160347Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.310737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.546854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:21.725409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:22.372782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:22.630407Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705428821130644:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:22.630525Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:22.636616Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705428821130649:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:22.636715Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705428821130650:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:22.637041Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:22.642573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:22.698689Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593705428821130658:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:22.796886Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593705428821130739:4904] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex |89.7%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2026-01-10T12:15:21.669020Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705423432787539:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:21.669484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:22.371279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:22.412489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:22.412605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:22.547289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705423432787319:2081] 1768047321571113 != 1768047321571116 2026-01-10T12:15:22.573819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:22.586814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:22.663373Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:22.676273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:23.332363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:23.332390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:23.332397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:23.332494Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:23.789223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:23.805222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:23.890468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:15:27.536180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:27.552267Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:27.763779Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705449091755380:2081] 1768047327378355 != 1768047327378358 2026-01-10T12:15:27.774376Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:27.784267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:27.840383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:27.840465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:27.855692Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:28.331610Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:28.344749Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:28.344770Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:28.344776Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:28.344845Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:28.376694Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:28.693434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:28.700056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:32.211029Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> KqpYql::EvaluateFor [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:14:42.232460Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:42.232793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:42.407244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:14:42.420679Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:14:42.447221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:14:42.447782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:42.448045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:14:42.453453Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:14:42.453954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:14:42.454170Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:43.328491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:43.474457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:43.474621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:43.475439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:43.475519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:43.531011Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:14:43.532142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:43.532777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:43.646424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:43.689850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:14:44.527056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:44.605107Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:44.605263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:45.005349Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:224:2183] Handle TEvProposeTransaction 2026-01-10T12:14:45.005426Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:224:2183] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:14:45.005561Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:224:2183] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1565:3022] 2026-01-10T12:14:45.170645Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1565:3022] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:14:45.170771Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1565:3022] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:14:45.171629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1565:3022] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:14:45.171747Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1565:3022] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:14:45.172279Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1565:3022] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:14:45.172440Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1565:3022] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:14:45.172591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1565:3022] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:14:45.177264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:45.177838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1565:3022] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:14:45.188696Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1565:3022] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:14:45.188804Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1565:3022] txid# 281474976715657 SEND to# [1:1492:2979] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:14:45.333345Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1616:2397] 2026-01-10T12:14:45.333720Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:14:45.435217Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:14:45.435478Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:14:45.440207Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:14:45.440321Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:14:45.440391Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:14:45.440912Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:14:45.441268Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:14:45.441372Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1640:2397] in generation 1 2026-01-10T12:14:45.459789Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:14:45.537736Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:14:45.537967Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:14:45.538066Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1643:2414] 2026-01-10T12:14:45.538141Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:14:45.538174Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:14:45.538204Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:14:45.538636Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:14:45.538732Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:14:45.538878Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:14:45.538914Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:14:45.538947Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:14:45.538981Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:14:45.539153Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1600:3052], serverId# [2:1613:2395], sessionId# [0:0:0] 2026-01-10T12:14:45.539546Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:14:45.542057Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:14:45.542232Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:14:45.545270Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPa ... {KQPEX@kqp_executer_impl.h:879} ActorId: [3:1916:3217] TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:1924:2443], trace_id# -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2026-01-10T12:15:12.255774Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:1916:3217] TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:1924:2443] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 328299 DurationUs: 9000 Tasks { TaskId: 1 CpuTimeUs: 325419 FinishTimeMs: 1768047312251 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 100 ReadBytes: 800 } ResultRows: 1 ResultBytes: 7 ComputeCpuTimeUs: 243 BuildCpuTimeUs: 325176 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 4 StartTimeMs: 1768047312242 CreateTimeMs: 1768047311863 UpdateTimeMs: 1768047312251 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:15:12.255861Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:1924:2443] 2026-01-10T12:15:12.255985Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [3:1916:3217] TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:15:12.256060Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [3:1916:3217] TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# 2026-01-10T12:15:12.256171Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [3:1916:3217] TxId: 281474976710662. Ctx: { TraceId: 01kekx9xehdw63w8spkxvhb7z2, Database: , SessionId: ydb://session/3?node_id=3&id=ZTgwYmQ4YjQtODY0MDFjN2MtMWJlNjg2ZjYtOTM3NDE2ODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.940268s ReadRows: 100 ReadBytes: 800 RequestUnits# 626 ForceFlag# true trace_id# 2026-01-10T12:15:12.257206Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down 2026-01-10T12:15:12.257310Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:224:2183] Handle TEvProposeTransaction 2026-01-10T12:15:12.257352Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:224:2183] TxId# 0 ProcessProposeTransaction 2026-01-10T12:15:12.257469Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:224:2183] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1946:3258] SnapshotReq marker# P0 2026-01-10T12:15:12.258571Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1949:3258] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-10T12:15:12.258805Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1949:3258] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-10T12:15:12.258932Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1946:3258] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 ... waiting for SysViewsRoster update finished 2026-01-10T12:15:26.009842Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:26.011091Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:26.014237Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:15:26.028385Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:26.029635Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:26.030227Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:26.030300Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:26.032755Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:26.032854Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:26.477944Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:26.618168Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:26.618326Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:26.618932Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:26.619032Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:26.714101Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-10T12:15:26.714807Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:26.715288Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:26.847124Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:26.879470Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:27.777522Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:27.856505Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:27.856690Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:28.270137Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:28.987612Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1725:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:28.987778Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1736:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:28.987889Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:28.993554Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1740:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:28.993791Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:29.009237Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:29.689158Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1739:3119], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:15:29.810433Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:1835:3187] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:32.875161Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNamesCore::NameListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::PrefixListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::ExceptionsListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit+AllowOverlimit [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system >> YdbProxy::ReadNonExistentTopic [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn |89.7%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 11443, MsgBus: 17490 2026-01-10T12:14:59.118101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705330131595525:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:59.118166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:59.752184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:59.861936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:59.862051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:59.924061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:59.960989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705330131595431:2081] 1768047299091184 != 1768047299091187 2026-01-10T12:14:59.976927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:00.036192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:00.164232Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:00.356770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:00.356793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:00.356800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:00.356890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:01.422457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:01.442568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:15:01.572262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:01.984019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:02.353065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:02.519861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:04.120305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705330131595525:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:04.120381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:05.178484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705355901401096:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:05.178600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:05.179873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705355901401106:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:05.179911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:05.633930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.684034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.727693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.779422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.826398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.922329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.991144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.072924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.202898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705360196369284:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.203007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.203499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705360196369290:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.203538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705360196369289:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.203649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.207649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:06.227497Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705360196369293:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:06.291084Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705360196369344:3789] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:10.239129Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7593705373081271560:2541] 2026-01-10T12:15:10.2451 ... xy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 178ms, session id ydb://session/3?node_id=2&id=ZmU5NTVlYmYtYjlmMTJlNDUtOTc3YzlmZTEtZWRkMTI0NTc= } 2026-01-10T12:15:28.816293Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705452768923531:2953] 2026-01-10T12:15:28.832430Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 181ms, session id ydb://session/3?node_id=2&id=NTQzMDhkNzctOTViMGNlMTItMTkwMjBhNS1lMTI2MmZmMw== } 2026-01-10T12:15:29.008411Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705452768923554:2962] 2026-01-10T12:15:29.020446Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 184ms, session id ydb://session/3?node_id=2&id=ODZjN2VhMWItMmFjYjUxODEtZWMxYzk2MGYtN2E1NTRjMTQ= } 2026-01-10T12:15:29.199079Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705457063890865:2968] 2026-01-10T12:15:29.208279Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 187ms, session id ydb://session/3?node_id=2&id=ZWQ1N2IxMDAtYTA2YjhhZDMtNDBjYTFlMWUtYzA2ZWI3MGY= } 2026-01-10T12:15:29.399936Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705457063890898:2979] 2026-01-10T12:15:29.409059Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 190ms, session id ydb://session/3?node_id=2&id=Y2JjNmViMDEtNjkwZDViNWYtNzgzYjQyOC0xYzZjNmI3NA== } 2026-01-10T12:15:29.593968Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705457063890923:2990] 2026-01-10T12:15:29.610128Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 193ms, session id ydb://session/3?node_id=2&id=OGRhYzZkMjctNGM2OTc1ZGYtNzA4YzZmZTItNmIxOWIwNWI= } 2026-01-10T12:15:29.799821Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705457063890990:3002] 2026-01-10T12:15:29.816885Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 196ms, session id ydb://session/3?node_id=2&id=ZTExN2JhMmQtMzJjYmY2ZjEtYzZkZjdjNmItZTBiYjg3MDA= } 2026-01-10T12:15:30.005784Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705457063891021:3016] 2026-01-10T12:15:30.016152Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 199ms, session id ydb://session/3?node_id=2&id=MmJkMDkzZjQtMTU3YjM3YjEtZjhlNDJmODQtNTZiZjg5ZjA= } 2026-01-10T12:15:30.211461Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705461358858358:3030] 2026-01-10T12:15:30.284281Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 202ms, session id ydb://session/3?node_id=2&id=OTZmYWE1OWQtNTU3NDMyMTItOGFhNTEwMzAtYmMzN2E5NmQ= } 2026-01-10T12:15:30.422484Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705461358858381:3040] 2026-01-10T12:15:30.441682Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 205ms, session id ydb://session/3?node_id=2&id=NTQ2ZjdiNTQtZGI3ODQzZjktNmQ4ZTU4NzktN2NkOGU3MDA= } 2026-01-10T12:15:30.644174Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705461358858403:3049] 2026-01-10T12:15:30.676140Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 208ms, session id ydb://session/3?node_id=2&id=MzBiNWRmNWQtMjcwNTQ5NTEtNDBkMzA0Ny1iYWM0MzgwMg== } 2026-01-10T12:15:30.858540Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705461358858428:3060] 2026-01-10T12:15:30.879778Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 211ms, session id ydb://session/3?node_id=2&id=MzI5Y2QzMi02MmUzMDdjMy1jZjU4OGNjZi1hYjM2YzU0Yw== } 2026-01-10T12:15:31.092996Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705461358858449:3069] 2026-01-10T12:15:31.100307Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 214ms, session id ydb://session/3?node_id=2&id=M2QzNmVmZGUtYTMyMmFhODMtYjVmM2RlNWQtMWRiMWE5OWY= } 2026-01-10T12:15:31.320687Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705465653825769:3079] 2026-01-10T12:15:31.400899Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 217ms, session id ydb://session/3?node_id=2&id=ODRiYjhhZWQtZDU1YWZlYTEtMzA3MTYxMmQtYWViMGYzMGU= } 2026-01-10T12:15:31.551886Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705465653825789:3088] 2026-01-10T12:15:31.557588Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 220ms, session id ydb://session/3?node_id=2&id=NWFmYzBmOTctNGU3OWE0NjYtZmFjZjc0NDAtNDk1ZjVmNmI= } 2026-01-10T12:15:31.733585Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047331750, txId: 281474976710677] shutting down 2026-01-10T12:15:31.976520Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705465653825913:3114] 2026-01-10T12:15:31.987585Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 226ms, session id ydb://session/3?node_id=2&id=NGZlZmY5OS1mYmMxZDVjMS02NTY1ODU5MS1hNmIwYjVlMA== } 2026-01-10T12:15:32.213775Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705465653825935:3123] 2026-01-10T12:15:32.234092Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 229ms, session id ydb://session/3?node_id=2&id=OGY0ZWNjMzUtYjRjMzZiZWMtY2I5Njg5Zi1hNDQ5ZWZiMw== } 2026-01-10T12:15:32.447577Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705469948793254:3133] 2026-01-10T12:15:32.468773Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 232ms, session id ydb://session/3?node_id=2&id=ZDczOTkzMWItNmJkOTc1MGItZDU2M2EzMDAtYjRiMWY0MDk= } 2026-01-10T12:15:32.687648Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705469948793269:3139] 2026-01-10T12:15:32.702929Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 235ms, session id ydb://session/3?node_id=2&id=Yzk3OWY0ZjAtODZkOTNkODYtOTJiNWFlZDUtYjZhMjg3NzQ= } 2026-01-10T12:15:32.932176Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705469948793294:3150] 2026-01-10T12:15:32.946083Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 238ms, session id ydb://session/3?node_id=2&id=Y2VhYTYyZGQtYjc1OGYyZDctYTcyNTgzNDAtN2U0MDFjOTU= } 2026-01-10T12:15:33.190836Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705469948793315:3159] 2026-01-10T12:15:33.195233Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 241ms, session id ydb://session/3?node_id=2&id=NDA3NTlmYTEtZDk2ZWFmNGEtMjFkNTI0YjAtNDY5OGMwOTE= } 2026-01-10T12:15:33.428053Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047333451, txId: 281474976710679] shutting down 2026-01-10T12:15:33.428243Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705474243760635:3169] 2026-01-10T12:15:33.444233Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 244ms, session id ydb://session/3?node_id=2&id=MTAxMTJmZTctZDJjZGJmNmMtMjU3MTcxZC04MGFkMzY2NQ== } 2026-01-10T12:15:33.679081Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705474243760739:3187] 2026-01-10T12:15:33.686283Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 247ms, session id ydb://session/3?node_id=2&id=Y2M5ZDVhZDAtODllZTNjZWQtOWI0OGUzMmEtNTc3MTk1ZWE= } 2026-01-10T12:15:33.936453Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705474243760768:3200] 2026-01-10T12:15:33.992233Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 250ms, session id ydb://session/3?node_id=2&id=Y2I3NWQ0NjAtNjUxNmVhY2YtNWMyNGRhOTgtMWE1NTYwMDQ= } 2026-01-10T12:15:34.179563Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047334207, txId: 281474976710682] shutting down 2026-01-10T12:15:34.448633Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7593705478538728230:3228] 2026-01-10T12:15:34.474124Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 256ms, session id ydb://session/3?node_id=2&id=MzM1MmRmMy1iYWQzZDYwZi1hOWQ3NjAzOC1hZjM4ZDIzNw== } 2026-01-10T12:15:34.548857Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047334578, txId: 281474976710684] shutting down |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 26345, MsgBus: 20144 2026-01-10T12:15:17.855850Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705405484013453:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:17.859665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:18.323317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:18.334915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:18.335015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:18.344339Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705405484013345:2081] 1768047317798353 != 1768047317798356 2026-01-10T12:15:18.350781Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:18.357543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:18.616981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:18.632700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:18.632743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:18.632751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:18.632830Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:18.860415Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:19.669582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:19.697069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:19.802727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.199993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.532606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:20.715511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:22.832968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705405484013453:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:22.833039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:23.964472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705431253819000:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.964559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.965882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705431253819011:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:23.965916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.309413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.363230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.401122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.432635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.464838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.505843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.554939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.636652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:24.741893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705435548787180:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.741984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.742418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705435548787185:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.742464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705435548787186:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.742572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:24.746841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:24.761201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705435548787189:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:24.870849Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705435548787242:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2016, MsgBus: 19651 2026-01-10T12:15:28.087280Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705454242209596:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:28.087851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:28.132059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:28.250374Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:28.268623Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:28.271344Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705454242209550:2081] 1768047328061347 != 1768047328061350 2026-01-10T12:15:28.301777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:28.301862Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:28.313387Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:28.439890Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:28.464602Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:28.464625Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:28.464636Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:28.464702Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:29.048474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:29.057882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-10T12:15:29.078820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:29.088263Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:29.204989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:29.441191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:29.533254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.021460Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705471422080595:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.021572Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.021907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705471422080605:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.021954Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.111300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.181479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.276134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.378998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.466071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.547829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.629211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.739288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.873772Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705471422081484:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.873913Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.874685Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705471422081489:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.874731Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705471422081490:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.875068Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:32.879057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:32.922847Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705471422081493:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:15:33.011175Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705475717048840:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:33.090422Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705454242209596:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:33.090526Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2026-01-10T12:15:20.012896Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705418266331004:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:20.013165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:20.920525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:20.920630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:21.071151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:21.071317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:21.098258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705413971363582:2081] 1768047319998302 != 1768047319998305 2026-01-10T12:15:21.160921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:21.164316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:21.181623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:21.292200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:22.273399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-10T12:15:22.273423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:22.273429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:22.273503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:22.907546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:22.914411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:23.418379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:25.012247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705418266331004:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:25.012352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:25.858914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705439741168498:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.859090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.859500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705439741168514:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.859537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705439741168515:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.859565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705439741168516:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.859598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:25.864820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:25.935904Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705439741168522:2643] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:15:25.940114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705439741168521:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:15:25.940678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:15:25.940811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705439741168520:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:15:26.008056Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705444036135865:2674] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:26.489197Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705444036135898:2688] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:27.089182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:15:27.667596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:28.317845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:15:28.793566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:15:29.606023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:15:32.489500Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705470384114241:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:32.489770Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:32.746388Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:32.750801Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:15:32.750880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:32.759675Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:32.819010Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:32.941874Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:33.256782Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:33.256805Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:33.256816Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:33.256912Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:33.533929Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:33.726759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:33.733660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:31.426487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:31.426582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:31.426642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:31.426682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:31.426757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:31.426806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:31.426865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:31.426945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:31.427859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:31.428502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:31.577672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:31.577766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:31.578677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:31.599441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:31.599959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:31.600263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:31.624310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:31.624752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:31.625537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.626057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:31.637249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:31.637485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:31.638873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:31.638946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:31.639008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:31.639051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:31.639093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-10T12:14:31.639316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.658452Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:31.845204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:31.845469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.845698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:31.845752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:31.846037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:31.846094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:31.848650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.848882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:31.849221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.849351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:31.849401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:31.849438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:31.853522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.853610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:31.853651Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:31.857288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.857348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.857413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.857488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:31.861716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:31.865098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:31.865331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:31.866516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.866696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:31.866750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.867075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:31.867130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.867319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:31.867414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:31.881304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
72057594046678944, txId: 202, path id: 128 2026-01-10T12:15:36.435975Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [13:211:2212], at schemeshard: 72057594046678944, txId: 202, path id: 128 2026-01-10T12:15:36.436017Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [13:211:2212], at schemeshard: 72057594046678944, txId: 202, path id: 129 2026-01-10T12:15:36.436060Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [13:211:2212], at schemeshard: 72057594046678944, txId: 202, path id: 129 2026-01-10T12:15:36.436147Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [13:211:2212], at schemeshard: 72057594046678944, txId: 202, path id: 130 2026-01-10T12:15:36.437735Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.437846Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.437885Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:15:36.437927Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:15:36.437967Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:15:36.439545Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.439644Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.439682Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:15:36.439719Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:15:36.439762Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:15:36.441108Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-10T12:15:36.441203Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.441240Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:15:36.441274Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-10T12:15:36.441312Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:15:36.442014Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.442119Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:15:36.442151Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:15:36.442182Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-10T12:15:36.442218Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-10T12:15:36.442290Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:15:36.445296Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:15:36.445468Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:15:36.448266Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:15:36.448391Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:15:36.450144Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:15:36.450192Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:15:36.452045Z node 13 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:15:36.452302Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.452343Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [13:2723:4712] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:15:36.453824Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:15:36.453870Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:15:36.453973Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:15:36.454007Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:15:36.454126Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:15:36.454166Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:15:36.454248Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:15:36.454287Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:15:36.454379Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:15:36.454411Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-10T12:15:36.456463Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:15:36.456697Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.456770Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [13:2726:4715] 2026-01-10T12:15:36.457318Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:15:36.457587Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:15:36.457652Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.457688Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [13:2726:4715] 2026-01-10T12:15:36.457839Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-10T12:15:36.457924Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.457955Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [13:2726:4715] 2026-01-10T12:15:36.458109Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:15:36.458193Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.458227Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [13:2726:4715] 2026-01-10T12:15:36.458373Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:15:36.458405Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [13:2726:4715] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> GroupWriteTest::TwoTables |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin >> Channels20::IcChannelEarlyFinish [GOOD] >> Channels20::IcChannelBackPressure >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> TTopicApiDescribes::DescribeConsumer |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TestShred::SimpleTestForTopic >> TIcNodeCache::GetNodesInfoTest |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |89.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> TTopicApiDescribes::GetLocalDescribe >> TTopicApiDescribes::DescribeTopic >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |89.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> TColumnShardTestSchema::RebootForgetWithLostAnswer |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 18825, MsgBus: 22658 2026-01-10T12:15:05.743237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705356746323208:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:05.743731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:06.240316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:06.278514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:06.278642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:06.340208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:06.353726Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705356746323152:2081] 1768047305680023 != 1768047305680026 2026-01-10T12:15:06.361588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:06.420067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:06.568799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:06.568828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:06.568839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:06.568914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:06.757251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:07.144184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:07.290375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.535655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.757906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.871384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:10.768208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705356746323208:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:10.768287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:11.394243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382516128825:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.394363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.394840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705382516128835:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.394885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.985591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.053070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.124648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.175057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.256769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.326038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.385264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.509006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.651260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705386811097009:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.651355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.651869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705386811097014:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.651926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705386811097015:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.651957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:12.656918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:12.676386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705386811097018:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:12.750060Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705386811097077:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:16.030328Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTRkZTdiZmUtNjlmZjBjYS02ZGIxZTg4Mi0xMjhjODY4OA==, ActorId: [1:7593705403990966585:2543], ActorState: ExecuteState, LegacyTraceId: 01kekxa31mfjh7v5f508pxh7c6, Create QueryResponse for error on request, msg: status# TIMEOUT issue ... TADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705465813400002:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:31.146418Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:31.289209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:31.509008Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:31.512280Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:31.512420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:31.514161Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705465813399857:2081] 1768047331095087 != 1768047331095090 2026-01-10T12:15:31.544190Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:31.544218Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:31.824849Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:31.824875Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:31.824885Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:31.824985Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:32.158648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:32.158769Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:32.625690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:32.634672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:32.648663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.768437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:33.149012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:33.297190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:35.904290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705482993270936:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:35.904410Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:35.904988Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705482993270946:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:35.905048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.040686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.086133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.124567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.135060Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705465813400002:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:36.135126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:36.170708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.243918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.310994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.370924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.445962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:36.542784Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705487288239115:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.542845Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.542931Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705487288239121:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.543197Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705487288239124:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.543237Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:36.546072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:36.558301Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705487288239123:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:36.643525Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705487288239176:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system >> MoveTable::WithData-Reboot >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 10330, MsgBus: 9179 2026-01-10T12:14:26.653175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705189147545412:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:26.653215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:27.053165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:27.053344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:27.093110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:27.126088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:27.182659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705189147545389:2081] 1768047266649178 != 1768047266649181 2026-01-10T12:14:27.187381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:27.287906Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:27.287938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:27.287949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:27.288040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:27.428044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:27.691584Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:27.769102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:14:27.769453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:14:27.785257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:27.786050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:14:27.797055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047267840, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:14:27.798186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-10T12:14:27.798232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-10T12:14:27.798477Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705193442513224:2252] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-10T12:14:27.798811Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705189147545357:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:27.798963Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705189147545360:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:27.799004Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705189147545363:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:27.799260Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7593705189147545862:2214][/Root] Path was updated to new version: owner# [1:7593705189147545683:2122], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:27.799704Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705193442513224:2252] Ack update: ack to# [1:7593705189147545747:2148], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:14:27.799865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-10T12:14:27.800270Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705193442513387:2289][/Root] Path was updated to new version: owner# [1:7593705193442513381:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:27.800555Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705193442513386:2288][/Root] Path was updated to new version: owner# [1:7593705193442513380:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:30.048767Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:30.051082Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705206327415427:2497][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705189147545683:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:30.085130Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010d9/r3tmp/spilling-tmp-runner/node_1_dc8a2d39-6dbe6d0e-433dcaba-f0b98ff1, actor: [1:7593705206327415435:2308] 2026-01-10T12:14:30.085768Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010d9/r3tmp/spilling-tmp-runner 2026-01-10T12:14:30.087236Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx8kzcfsvqzbhd9wr7bzy9", Request has 18444976026439.464421s seconds to be completed 2026-01-10T12:14:30.092206Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx8kzcfsvqzbhd9wr7bzy9", Created new session, sessionId: 
ydb://session/3?node_id=1&id=MWQ5Y2NlOWYtMjFlMmJkZTEtY2EzMDMyNWYtN2MzOTA0MDI=, workerId: [1:7593705206327415455:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:14:30.092460Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx8kzcfsvqzbhd9wr7bzy9 2026-01-10T12:14:30.092957Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:14:30.093004Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:14:30.093039Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:30.093650Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705206327415456:2499][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705189147545683:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:30.094601Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705206327415457:2500][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705189147545683:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:14:30.095655101 242413 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:30.095856018 242413 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:14:30.108751962 242413 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:30.108922792 242413 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:30.111534Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:14:30.118793169 242413 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:30.118972313 242413 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:30.120623Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:14:30.124023Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx8p7bd94a034vcqgcz2z5, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5Y2NlOWYtMjFlMmJkZTEtY2EzMDMyNWYtN2MzOTA0MDI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705206327415455:2327] 2026-01-10T12:14:30.124072Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7593705206327415473:2502] 2026-01-10T12:14:30.126046Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705206327415475:2503][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7593705189147545683:2122], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system >> TColumnShardTestSchema::ExportAfterFail >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous >> MoveTable::WithData-Reboot [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |89.8%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData-Reboot [GOOD] Test command err: 2026-01-10T12:15:45.279578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:45.316880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:45.317148Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:45.325251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:45.325532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:45.325795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:45.325955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:45.326096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:45.326196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:45.326296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:45.326453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:45.326601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:45.326741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.326844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:45.326967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:45.327092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:45.420328Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:45.420954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:45.421022Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:45.421229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:45.421496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:45.421598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:45.421648Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:45.421769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:45.421842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:45.421894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:45.421942Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:45.422147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:45.422217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:45.422270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:45.422345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:45.422456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:45.422542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:45.422597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:45.422636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:45.422691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:45.422740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-10T12:15:45.422772Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:45.422821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:45.422865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:45.422901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:45.423135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:45.423251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:45.423292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:45.423430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:45.423492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.423551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.423636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:45.423694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:45.423725Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:45.423776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:15:45.423817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:15:45.423847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-10T12:15:45.424006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:15:45.424064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:15:46.363456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:331:2343];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:15:46.363669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.364045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.364229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.364489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:15:46.364654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.364869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.365200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:337:2349] finished for tablet 9437184 2026-01-10T12:15:46.365763Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:331:2343];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.016},{"events":["f_ack"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":2063090,"name":"_full_task","f":2063090,"d_finished":0,"c":0,"l":2082938,"d":19848},"events":[{"name":"bootstrap","f":2063525,"d_finished":2864,"c":1,"l":2066389,"d":2864},{"a":2082112,"name":"ack","f":2080100,"d_finished":1837,"c":1,"l":2081937,"d":2663},{"a":2082099,"name":"processing","f":2066693,"d_finished":5553,"c":3,"l":2081941,"d":6392},{"name":"ProduceResults","f":2065490,"d_finished":3052,"c":6,"l":2082556,"d":3052},{"a":2082561,"name":"Finish","f":2082561,"d_finished":0,"c":0,"l":2082938,"d":377},{"name":"task_result","f":2066714,"d_finished":3626,"c":2,"l":2079940,"d":3626}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.365895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:331:2343];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:15:46.367842Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:331:2343];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.016},{"events":["f_ack"],"t":0.017},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":2063090,"name":"_full_task","f":2063090,"d_finished":0,"c":0,"l":2083597,"d":20507},"events":[{"name":"bootstrap","f":2063525,"d_finished":2864,"c":1,"l":2066389,"d":2864},{"a":2082112,"name":"ack","f":2080100,"d_finished":1837,"c":1,"l":2081937,"d":3322},{"a":2082099,"name":"processing","f":2066693,"d_finished":5553,"c":3,"l":2081941,"d":7051},{"name":"ProduceResults","f":2065490,"d_finished":3052,"c":6,"l":2082556,"d":3052},{"a":2082561,"name":"Finish","f":2082561,"d_finished":0,"c":0,"l":2083597,"d":1036},{"name":"task_result","f":2066714,"d_finished":3626,"c":2,"l":2079940,"d":3626}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:15:46.367961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:15:46.273140Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:15:46.368003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:15:46.368199Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-10T12:15:46.369152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-10T12:15:46.369556Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1768047346252:12} readable: {1768047346252:max} at tablet 9437184 2026-01-10T12:15:46.369712Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-10T12:15:46.369777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1768047346252:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:46.369872Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1768047346252:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> TestShred::SimpleTestForTopic [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant [GOOD] >> DqUnboxedValueToNativeArrowConversion::ListOfJsons [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:15:25.413061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:25.557735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:25.570565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:25.571043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:25.571160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:25.991246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:25.991399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:25.998400Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047321741974 != 1768047321741977 2026-01-10T12:15:26.015889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:26.069435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:26.194879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:26.523920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:26.544421Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:26.660942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:26.744923Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:15:26.745226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:26.801223Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:15:26.801475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:26.819677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:26.819877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:26.825978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:26.826116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:26.826188Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:26.826608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:26.826834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:26.826945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:15:26.827526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:26.827638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:26.832830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:15:26.832922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:15:26.832994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:15:26.833336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:26.833463Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:26.833533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:15:26.844486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:26.914818Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:26.915054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:26.915173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:15:26.915224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:26.915271Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:26.915311Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:26.915630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:26.915682Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:15:26.915740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:26.915791Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:15:26.915814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:26.915849Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:15:26.915877Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:26.916244Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:26.916331Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:26.916588Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:26.916636Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:26.916679Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:26.916724Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:26.916776Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:15:26.916849Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:15:26.916995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:15:26.917066Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:26.917092Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:26.917117Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:15:26.917151Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:26.917637Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:26.917934Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:26.918048Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:26.918557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:891:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-10T12:15:26.918772Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:15:26.918946Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:26.919004Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-10T12:15:26.921351Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:26.921446Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:26.933154Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:26.933269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:26.933947Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:15:26.934009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:27.070830Z node 1 :TX_DATASHARD ... ransaction::Execute at 72075186224037889 2026-01-10T12:15:47.515882Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:47.515907Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037889 2026-01-10T12:15:47.516056Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:15:47.516197Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:47.516327Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:47.516398Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-01-10T12:15:47.516912Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:47.517381Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:47.518752Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:47.518827Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:47.519220Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:47.519610Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:47.522232Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:47.522301Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.525394Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-01-10T12:15:47.525446Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:47.525937Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:47.526036Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:47.527248Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-01-10T12:15:47.527315Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:47.528157Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:47.528204Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:47.528255Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:47.528348Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:47.528420Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:47.528524Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.530339Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.530467Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:47.530869Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:47.530936Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:47.530973Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:15:47.531039Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:47.531091Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:47.531163Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:47.535193Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:47.535429Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:47.535509Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 
2026-01-10T12:15:47.541673Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:47.542388Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-10T12:15:47.542466Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:15:47.552800Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.552908Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1009:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.553006Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.553631Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.553807Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.558903Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:47.571513Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.571685Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:47.699473Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.699598Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:47.702780Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1013:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:15:47.738448Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:47.819218Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2925], serverId# [4:1140:2926], sessionId# [0:0:0] 2026-01-10T12:15:47.819605Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-10T12:15:47.819909Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047347819815 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:47.820137Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-10T12:15:47.831499Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:47.831617Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:47.836427Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1146:2931], serverId# [4:1147:2932], sessionId# [0:0:0] 2026-01-10T12:15:47.842246Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1148:2933], serverId# [4:1149:2934], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:27.268771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:27.390931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:27.400470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:27.400968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:27.401078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:27.797805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:27.798033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:27.804934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047324229925 != 1768047324229928 2026-01-10T12:15:27.816574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:27.866619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:27.974826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:28.306153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:28.325695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:28.460948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:28.509195Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:15:28.509524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:28.559334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:28.559490Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:28.561133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:28.561245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:28.561316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:28.561753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:28.561927Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:28.561997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:15:28.573519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:28.613547Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:28.613793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:28.613934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:15:28.613982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:28.614019Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:28.614056Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.614642Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:28.614774Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:28.614891Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.614951Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:28.614997Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:28.615045Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.615118Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:15:28.615306Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:28.615549Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:28.615686Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:28.617877Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:28.630537Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:28.630667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:28.768828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:15:28.785623Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:28.785722Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.787452Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.787527Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:28.787580Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:28.787901Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:15:28.788069Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:28.798131Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.798237Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:28.800721Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:28.801166Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:28.803870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:28.803931Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.804972Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:28.805068Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.806599Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.806656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:28.806715Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:28.806811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:28.806872Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:28.806964Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.811979Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:28.814075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:28.814263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:28.814364Z node 1 :TX_DATASHARD DEBUG: datashar ... Heartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.075039Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.077077Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:47.077238Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:47.077306Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:47.091808Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:47.091980Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:47.092047Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-10T12:15:47.092117Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-10T12:15:47.093301Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.108924Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:47.275033Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:47.275111Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.275463Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:47.275512Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:47.275566Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 
72075186224037888 2026-01-10T12:15:47.275767Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-10T12:15:47.275903Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:47.277035Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:47.277961Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:47.323132Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-10T12:15:47.323249Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:47.323294Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:47.323341Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.323422Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:47.323485Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-10T12:15:47.323580Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.325799Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-10T12:15:47.325888Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:47.334295Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.334420Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.334500Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.335360Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.335514Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.340153Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:47.347202Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.514326Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:47.517562Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:15:47.543215Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:47.617471Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-10T12:15:47.617864Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-10T12:15:47.618148Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047347618049 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:47.618346Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-10T12:15:47.629714Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:47.629858Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.688298Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-10T12:15:47.688565Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047347688476 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:47.688715Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-10T12:15:47.699948Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:47.700028Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.843408Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037888 2026-01-10T12:15:47.843724Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1768047347843595 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:47.843863Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: 
Executed write operation for [0:5] at 72075186224037888, row count=1 2026-01-10T12:15:47.855047Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:47.855121Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:47.857087Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1211:2987], serverId# [4:1212:2988], sessionId# [0:0:0] 2026-01-10T12:15:47.862880Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1213:2989], serverId# [4:1214:2990], sessionId# [0:0:0] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:41.724761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:41.724846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:41.724887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:41.724926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:41.724961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:41.724989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:41.725045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:41.725104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:41.725983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:41.726290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2026-01-10T12:15:41.814574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:41.814646Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:41.831374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:41.831519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:41.831740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:41.847748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:41.848059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:41.848840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:41.849141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:41.855661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:41.855919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:41.857272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:41.857343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:41.857409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:41.857457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:41.857498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:41.857781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:42.022059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.022820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.022907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.022959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:42.023516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:15:47.353599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:15:47.353975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2056:3869], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2057:3870] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:15:47.354018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:15:47.354049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-10T12:15:47.354202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-10T12:15:47.354235Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:15:47.354274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:15:47.354340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:15:47.354400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:15:47.354460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:15:47.354528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:15:47.908605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1247:3133]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:47.908957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.908983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.909051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.909074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.909125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1247:3133], Recipient [1:1247:3133]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.909156Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:47.931223Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:15:47.931322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:15:47.931378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:15:47.931734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-10T12:15:47.931768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:15:47.931794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:15:47.931865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:15:47.931906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:15:47.931953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:15:47.931984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:15:48.490103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1247:3133]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:15:48.490435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.490462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.490526Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1247:3133], Recipient [1:1247:3133]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.490565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.490737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.490767Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:15:48.511469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:15:48.511531Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:15:48.511560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:15:48.511729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:15:48.511751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:15:48.511778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:15:48.511852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:15:48.511900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:15:48.511945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.981000s, Timestamp# 1970-01-01T00:00:05.158000Z 2026-01-10T12:15:48.511976Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-10T12:15:48.514696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:15:48.515163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2078:3891], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:15:48.515210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:15:48.515240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:15:48.515351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:685:2675], Recipient [1:702:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:15:48.515378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:15:48.515408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 5191, MsgBus: 31459 2026-01-10T12:15:04.885874Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705348647286693:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:04.886003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:04.932562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:05.458139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:05.517022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:05.517148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:05.622035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:05.663902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705348647286583:2081] 1768047304823949 != 1768047304823952 2026-01-10T12:15:05.670894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:05.700034Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:05.927493Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:06.058840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:06.058876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:06.058891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:06.059032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:07.192015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:07.224219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:07.327083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.670464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.009241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:08.136664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:09.887772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705348647286693:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:09.887849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:10.946115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593705374417092235:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.946225Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.946707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705374417092245:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:10.946786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.430395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.486484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.557562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.614455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.658243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.718578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.773143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.859581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:11.993506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705378712060419:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.993623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.994002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705378712060424:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.994039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705378712060425:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.994086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:11.998348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:12.016570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:15:12.016835Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705378712060428:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:12.087302Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705383007027775:3776] txid# 281474976710671, i ... for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 169ms, session id ydb://session/3?node_id=2&id=ZDM2Y2JiNjQtYmIwZjYyZDQtMWFkZjkxM2UtYWM5MWQwOGM= } 2026-01-10T12:15:41.748464Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 172ms, session id ydb://session/3?node_id=2&id=YTMwMTdiYmUtYmViNjhmNjMtMjJkODdjYjQtMzgzYjk1ZjE= } 2026-01-10T12:15:41.884859Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=OGE4ZjhkM2YtMzQxZTQ3NjgtNGI0OWNmNWYtMmVmZDU5OTA=, ActorId: [2:7593705509379732975:2961], ActorState: ExecuteState, LegacyTraceId: 01kekxaw493b5ch6e94wmjp3b0, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 175ms exceeded" severity: 1 }{ message: "Cancelling after 175ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:42.077608Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NTdkNDFhYzQtNzJkZDdlNTMtMTllNWVhYWItMzUzNDA5YzA=, ActorId: [2:7593705509379732998:2971], ActorState: ExecuteState, LegacyTraceId: 01kekxawa5agnmvk7bek3se52b, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 178ms exceeded" severity: 1 }{ message: "Cancelling after 176ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:42.272186Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YWYwMzc0ZWEtNjI0NmYxZDQtMjlkZDMyNjctOWQzYWI2OQ==, ActorId: [2:7593705513674700316:2980], ActorState: ExecuteState, LegacyTraceId: 01kekxawga5rss7jqb7w9cx910, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 181ms exceeded" severity: 1 }{ message: "Cancelling after 178ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:42.528335Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 184ms, session id ydb://session/3?node_id=2&id=YjBlZDVhMjktNzY0ODk1OTItZDk1NGVkY2MtYzc3MmZiZTI= } 2026-01-10T12:15:42.672485Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YjJjNDhiZDItYjM3OGU4NDctNmRlYjI2NDAtZTM1ODEwYw==, ActorId: [2:7593705513674700358:2998], ActorState: ExecuteState, LegacyTraceId: 01kekxawwhedk19esn4745y6fx, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 187ms exceeded" severity: 1 }{ message: "Cancelling after 188ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:42.805453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:15:42.805479Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:42.928223Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 190ms, session id ydb://session/3?node_id=2&id=Njc2OTZhMGYtN2IxZDNlYjgtMTFlYzFhZmMtOTliMmQ1Zg== } 2026-01-10T12:15:43.074185Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MjBlYmQ1MjMtNmFhODY4NmItM2U2M2IyMGMtMmJmYWVmM2E=, ActorId: [2:7593705513674700431:3024], ActorState: ExecuteState, LegacyTraceId: 01kekxax910fkww7s0dmj8a8zv, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 193ms exceeded" severity: 1 }{ message: "Cancelling after 190ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:43.370556Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 196ms, session id ydb://session/3?node_id=2&id=NmQxMzg2ZS1hZDI3ZTc2ZC00NjVkNTU2ZS04ODQzMGZjOQ== } 2026-01-10T12:15:43.520655Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 199ms, session id ydb://session/3?node_id=2&id=ZWE5OTk4MDAtYWQyZGNiZDgtZGFjMjYzY2ItZTY1YjExOTY= } 2026-01-10T12:15:43.777115Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 202ms, session id ydb://session/3?node_id=2&id=NWViMjY1YmYtNTVhZDlhMTAtYzJmZDI2YjctZDA2MmQyNmM= } 2026-01-10T12:15:43.945204Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=OWRiY2E4MTEtMmMzZjg1ZTAtZGJhMjJhM2QtOGY3NWNmYmY=, ActorId: [2:7593705517969667923:3070], ActorState: ExecuteState, LegacyTraceId: 01kekxay3j50w00ghvybbpjkbd, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 205ms exceeded" severity: 1 }{ message: "Cancelling after 210ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:44.176788Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZDk5YTJjOTMtYWRhYjRjNDEtODAxNzQ4OTYtMWI3YjAxZg==, ActorId: [2:7593705517969667957:3080], ActorState: ExecuteState, LegacyTraceId: 01kekxayarewf4h3br4aqq7qnn, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 208ms exceeded" severity: 1 }{ message: "Cancelling after 212ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:44.358704Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047344378, txId: 281474976715679] shutting down 2026-01-10T12:15:44.654649Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 214ms, session id ydb://session/3?node_id=2&id=MjUzZGMyNDktZWU0NmVmMWUtNzFjMGUxM2MtYmNjNWM3NWQ= } 2026-01-10T12:15:44.894507Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 217ms, session id ydb://session/3?node_id=2&id=ZmFjMmEwNTMtMjc5ZWI1ZmItOWQxNmZjYjEtZmNjMjJiNDY= } 2026-01-10T12:15:45.119553Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 220ms, session id ydb://session/3?node_id=2&id=YmNmZTQ0NTktZTQ4N2EzNTctOWI2NWQ3NmItN2FiMGVhNzA= } 2026-01-10T12:15:45.304517Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Mjg1ODgzMjEtNzMyYWE3MmMtMmNlZjM0MDYtOGM3M2M1N2Y=, ActorId: [2:7593705526559602835:3134], ActorState: ExecuteState, LegacyTraceId: 01kekxazdn2c2vbtgqg1htwnjh, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 223ms exceeded" severity: 1 }{ message: "Cancelling after 224ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:45.540427Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:7593705526559602898:3147] TxId: 281474976715684. Ctx: { TraceId: 01kekxazn05sktt4qdvfrmwd5f, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ODk0ZGE5MzktYzIyYTE2OWEtYWI5ODAwZWQtMjVkOGVmYzg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-10T12:15:45.540711Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ODk0ZGE5MzktYzIyYTE2OWEtYWI5ODAwZWQtMjVkOGVmYzg=, ActorId: [2:7593705526559602864:3147], ActorState: ExecuteState, LegacyTraceId: 01kekxazn05sktt4qdvfrmwd5f, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-01-10T12:15:45.540978Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047345575, txId: 281474976715683] shutting down 2026-01-10T12:15:45.592623Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 226ms, session id ydb://session/3?node_id=2&id=MmYwZDdiNDItNGRkYTZiYzYtN2UyMmVjOTMtNGUwYzRkMmY= } 2026-01-10T12:15:45.788292Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047345820, txId: 281474976715686] shutting down 2026-01-10T12:15:45.788460Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YTdiNTc2ODgtZjliYjUxZjEtYTVkN2UyYTEtMTQwZTAzYzI=, ActorId: [2:7593705526559602922:3152], ActorState: ExecuteState, LegacyTraceId: 01kekxazwp1a3c44ddbpee9cn8, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 229ms exceeded" severity: 1 }{ message: "Cancelling after 227ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:46.045076Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 232ms, session id ydb://session/3?node_id=2&id=NGIzYmVhMGUtYThhYzI5MzMtNDM4ZjNjLTI5NDkxYWMx } 2026-01-10T12:15:46.294052Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047346317, txId: 281474976715688] shutting down 2026-01-10T12:15:46.342764Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 235ms, session id ydb://session/3?node_id=2&id=MWRjNzk2NjMtZTg2ZjE2LTY1MGU3NWU0LWVlNWViMjQw } 2026-01-10T12:15:46.541260Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047346576, txId: 281474976715690] shutting down 2026-01-10T12:15:46.592026Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 238ms, session id ydb://session/3?node_id=2&id=MWEwOGIxZTItYWFiZDM2NzItNDlhODM5OTMtODA3YjAxNzU= } 2026-01-10T12:15:46.785802Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NmE5MDAzNDAtNjk3MDhjMTItYzA0ZDIyNzEtNzFlYjRjZmU=, ActorId: [2:7593705530854570512:3208], ActorState: ExecuteState, LegacyTraceId: 01kekxb0vf5ye9f21t8kamdyxt, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 241ms exceeded" severity: 1 }{ message: "Cancelling after 239ms in ExecuteState" severity: 1 } trace_id# 2026-01-10T12:15:47.037412Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047347066, txId: 281474976715692] shutting down 2026-01-10T12:15:47.372973Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 247ms, session id ydb://session/3?node_id=2&id=NDVlYWM1MzYtOGYyMWM3MjctYTc3MWM1YWMtMTFkYTQ1NGQ= } 2026-01-10T12:15:47.556563Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047347584, txId: 281474976715695] shutting down |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 28249, MsgBus: 64355 2026-01-10T12:14:27.951144Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705193301273643:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:27.951250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:28.228328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:28.241197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:28.241370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:28.326412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:28.328393Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705193301273617:2081] 1768047267948980 != 1768047267948983 2026-01-10T12:14:28.333236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:28.392829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:28.525184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:28.525214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:28.525223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:28.525333Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:28.969906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:29.078443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:14:29.078916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:14:29.086193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:29.086976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:14:29.089445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047269135, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:14:29.098077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:14:29.098761Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705197596241446:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:14:29.099119Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705193301273585:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:29.099226Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705193301273588:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:29.099280Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705193301273591:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:29.099325Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705197596241553:2288][/Root] Path was updated to new version: owner# [1:7593705197596241547:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:29.099480Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705197596241350:2204][/Root] Path was updated to new version: owner# [1:7593705193301273904:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 
AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:29.099639Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705197596241554:2289][/Root] Path was updated to new version: owner# [1:7593705197596241548:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:29.099814Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705197596241446:2251] Ack update: ack to# [1:7593705197596241271:2149], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:14:29.099978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-10T12:14:29.100342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:14:31.436752Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:31.439588Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010ca/r3tmp/spilling-tmp-runner/node_1_1bc587ba-1df7f578-377e43ca-69414fb9, actor: [1:7593705210481143642:2306] 2026-01-10T12:14:31.439686Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010ca/r3tmp/spilling-tmp-runner 2026-01-10T12:14:31.442253Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx8n8544mcb34mkav74sw5", Request has 18444976026438.109413s seconds to be completed 2026-01-10T12:14:31.446461Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx8n8544mcb34mkav74sw5", Created new session, sessionId: ydb://session/3?node_id=1&id=ZDQ5MjQ3YTAtNWQ5ZGQ3ZGItMThlNTY0M2QtNzliNjAwMjA=, workerId: [1:7593705210481143662:2325], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:14:31.446753Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx8n8544mcb34mkav74sw5 2026-01-10T12:14:31.446831Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:14:31.446873Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-10T12:14:31.446908Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:31.455319Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705210481143669:2493][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705193301273904:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:31.455428Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705210481143665:2492][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705193301273904:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:14:31.456004633 242872 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:31.456273853 242872 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0110 12:14:31.460859191 242872 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:31.461017996 242872 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:31.468632Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705210481143670:2494][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705193301273904:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:31.472283Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:14:31.481751982 242873 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:31.481927011 242873 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:31.483597Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:14:31.501766306 242873 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-10T12:14:31.498400Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx8qj91jm0d63hthzrcgfe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ5MjQ3YTAtNWQ5ZGQ3ZGItMThlNTY0M2QtNzliNjAwMjA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593705210481143662:2325] E0110 12:14:31.507682540 242873 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:31.498441Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7593705210481143690:2497] 2026-01-10T1 ... el.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:47.264189Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-10T12:15:47.488664Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705533784212473:2339] TxId: 281474976710662. Ctx: { TraceId: 01kekxb17f020hdyagz7hpkawj, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmZhZDQyNzktNThmZGU2MTEtZGJlMzcwZDYtOGNmMWUzYjk=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:15:47.495029Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekxb17f020hdyagz7hpkawj, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmZhZDQyNzktNThmZGU2MTEtZGJlMzcwZDYtOGNmMWUzYjk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705533784212477:2354] 2026-01-10T12:15:47.495210Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekxb17f020hdyagz7hpkawj, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmZhZDQyNzktNThmZGU2MTEtZGJlMzcwZDYtOGNmMWUzYjk=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7593705533784212478:2355] 2026-01-10T12:15:47.496062Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxb17f020hdyagz7hpkawj", Forwarded response to sender actor, requestId: 4, sender: [9:7593705529489245074:2338], selfId: [9:7593705508014407869:2265], source: [9:7593705529489245075:2339] 2026-01-10T12:15:47.497474Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MmZhZDQyNzktNThmZGU2MTEtZGJlMzcwZDYtOGNmMWUzYjk=, workerId: [9:7593705529489245075:2339], local sessions count: 0 E0110 12:15:47.721173829 263423 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:47.721404891 263423 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> MoveTable::RenameAbsentTable_Negative |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::IcChannelBackPressure [GOOD] >> Channels20::CaIntegrationTrivial >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:26.128732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:26.248898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:26.256903Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:26.257356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:26.257480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:26.652913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:26.653036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:26.668704Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047322285219 != 1768047322285222 2026-01-10T12:15:26.698947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:26.749241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:26.875190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:27.320237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:27.341442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:27.473101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:27.559399Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:15:27.559694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:27.634979Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:15:27.637665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:27.652321Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:27.652558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:27.654639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:27.654740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:27.654810Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:27.655289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:27.655482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:27.655576Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:15:27.656586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:27.656707Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:27.659938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:15:27.660018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:15:27.660066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:15:27.660462Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:27.660595Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:27.660689Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:15:27.677337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:27.743899Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:27.744852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:27.745006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:15:27.745055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:27.745111Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:27.745363Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:27.745864Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:27.745949Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:15:27.746030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:27.746130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:15:27.746158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:27.746204Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:15:27.746244Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:27.746886Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:27.747019Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:27.747543Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:27.747599Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:27.747683Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:27.747748Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:27.747965Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:15:27.748060Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:15:27.748254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:15:27.748338Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:27.748381Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:27.748413Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:15:27.748462Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:27.750018Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:27.750551Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:27.750720Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:27.751233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:891:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-10T12:15:27.751724Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:15:27.752427Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:27.752530Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-10T12:15:27.754801Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:27.754901Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:27.768497Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:27.768598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:27.769196Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:15:27.769300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:27.908948Z node 1 :TX_DATASHARD ... tashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:48.512065Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:48.512957Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:48.561324Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:15:48.561955Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:48.562013Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:48.562084Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:48.562186Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:48.562251Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:48.562381Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:48.562963Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:48.563022Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:48.563055Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:15:48.563102Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:48.563141Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:48.563202Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:48.564016Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted 
TabletId# 72075186224037890 step# 2000} 2026-01-10T12:15:48.564317Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:15:48.564967Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2026-01-10T12:15:48.565011Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:15:48.566429Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:15:48.566471Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:15:48.566508Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-10T12:15:48.566562Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:48.566604Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:48.566661Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:15:48.575638Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:48.576229Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:48.576317Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:48.576494Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:48.577058Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-10T12:15:48.577114Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:15:48.577612Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:48.578115Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-10T12:15:48.578165Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-10T12:15:48.590481Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:48.590599Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:48.590691Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:48.591854Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:48.592255Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:48.597561Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:48.605408Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:48.605540Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:48.605602Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:15:48.794610Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:15:48.794966Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:48.795049Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:48.798557Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:15:48.832910Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:48.925184Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1223:2975], serverId# [4:1224:2976], sessionId# [0:0:0] 2026-01-10T12:15:48.925544Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-10T12:15:48.925822Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047348925723 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:48.926014Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047348925723 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:48.926125Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-10T12:15:48.937268Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:48.937358Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:48.942610Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1230:2981], serverId# [4:1231:2982], sessionId# [0:0:0] 2026-01-10T12:15:48.947615Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1232:2983], serverId# [4:1233:2984], sessionId# [0:0:0] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:15:28.314312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:28.442537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:28.454863Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:28.455488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:28.455637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:28.863090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:28.863311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:28.897323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047324697725 != 1768047324697728 2026-01-10T12:15:28.921107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:28.973327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:29.087907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:29.511628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:29.527865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:29.643315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:29.695077Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:15:29.695382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:29.746841Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:15:29.747122Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:29.756680Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:29.756897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:29.758906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:29.759037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:29.759114Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:29.759533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:29.759709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:29.759807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:15:29.760347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:29.760465Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:29.761971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:15:29.762041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:15:29.762098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:15:29.762431Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:29.762523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:29.762590Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:15:29.773542Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:29.804731Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:29.804992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:29.805143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:15:29.805191Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:29.805240Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:29.805293Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:29.805638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:29.805675Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:15:29.805729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:29.805782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:15:29.805806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:29.805844Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:15:29.805879Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:29.806287Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:29.806386Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:29.806567Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:29.806610Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:29.806663Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:29.806708Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:29.806756Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:15:29.806824Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:15:29.807024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:15:29.807092Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:29.807117Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:29.807143Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:15:29.807180Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:29.807686Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:29.807981Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:29.808109Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:29.808575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:891:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-10T12:15:29.808796Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:15:29.808993Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:29.809053Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-10T12:15:29.811202Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:29.811299Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:29.823752Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:29.823874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:29.824563Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:15:29.824623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:29.944809Z node 1 :TX_DATASHARD ... 186224037890 step# 2000} 2026-01-10T12:15:49.017279Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:15:49.017929Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2026-01-10T12:15:49.017975Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:15:49.019609Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:15:49.019655Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:15:49.019693Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-10T12:15:49.019748Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:49.019806Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:49.019900Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:15:49.025158Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:49.025764Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:49.025861Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:49.026024Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:49.026616Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-10T12:15:49.026677Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:15:49.027209Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:49.027764Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: 
Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-10T12:15:49.027818Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-10T12:15:49.040511Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:49.040635Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:49.040723Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:49.041967Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:49.042233Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:49.047517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:49.055211Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:49.055337Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:49.055398Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:15:49.176884Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:15:49.177150Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:49.177201Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:49.179633Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:15:49.216297Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:49.328108Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1223:2975], serverId# [4:1224:2976], sessionId# [0:0:0] 2026-01-10T12:15:49.328485Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-10T12:15:49.328736Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047349328656 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.328882Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047349328656 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.328958Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-10T12:15:49.340211Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:49.340310Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:49.465101Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-10T12:15:49.465389Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1768047349465294 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.465549Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1768047349465294 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.465626Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1768047349465294 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified 
Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.465689Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1768047349465294 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:49.465766Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-10T12:15:49.476841Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:49.476940Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:49.481500Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1271:3015], serverId# [4:1272:3016], sessionId# [0:0:0] 2026-01-10T12:15:49.487518Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1273:3017], serverId# [4:1274:3018], sessionId# [0:0:0] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges >> MoveTable::RenameAbsentTable_Negative [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] Test command err: 2026-01-10T12:15:50.874640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:50.907412Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:50.907659Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:50.915216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:50.915478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:50.915728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:50.915861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:50.915968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:50.916060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:50.916193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:50.916322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:50.916487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:50.916599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.916699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:50.916821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:50.916940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:50.948863Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:50.949282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:50.949340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:50.949535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:50.949701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:50.949786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:50.949828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:50.949926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:50.949989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:50.950035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:50.950065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:50.950254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:50.950325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:50.950374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:50.950433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:50.950552Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:50.950608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:50.950648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:50.950682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:50.950736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:50.950780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:50.950806Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:50.950855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:50.950894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:50.950925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:50.951164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:50.951303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:50.951340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:50.951477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:50.951529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.951569Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.951626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:50.951687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:50.951716Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:50.951756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:15:50.951791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:15:50.951820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:15:50.951941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:15:50.951994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ta.cpp:29;EXECUTE:db_locksLoadingTime=4; 2026-01-10T12:15:51.243648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-10T12:15:51.243731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=50; 2026-01-10T12:15:51.243786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=16; 2026-01-10T12:15:51.243878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2026-01-10T12:15:51.243928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:15:51.244002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-10T12:15:51.244054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=18; 2026-01-10T12:15:51.244195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=112; 2026-01-10T12:15:51.244236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2776; 2026-01-10T12:15:51.244397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:15:51.244466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 
2026-01-10T12:15:51.244568Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:15:51.244881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:15:51.244943Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:15:51.245041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.021000s; 2026-01-10T12:15:51.245398Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:15:51.245503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:15:51.245553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:15:51.245598Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:15:51.245736Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.145000s; 2026-01-10T12:15:51.245794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:15:51.549996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136826516592960;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:51.550069Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136826516592960;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:15:51.564969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136826516592960;op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137032677745408;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:15:51.565109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136826516592960;op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137032677745408;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2026-01-10T12:15:51.565173Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136826516592960;op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047351843;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137032677745408;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-10T12:15:51.565505Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:15:51.565609Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047351843 at tablet 9437184, mediator 0 2026-01-10T12:15:51.565644Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-10T12:15:51.565871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:51.566002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:51.566058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:51.566165Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:15:51.579797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1768047351843;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-10T12:15:51.579896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:15:51.580053Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-10T12:15:51.580161Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-10T12:15:51.580417Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:15:51.596372Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-10T12:15:51.621078Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-10T12:15:51.622082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136826516642912;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=111;dst=2; 2026-01-10T12:15:51.622152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136826516642912;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=111;result=not_found; 2026-01-10T12:15:51.622265Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136826516642912;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=No such table; 2026-01-10T12:15:51.634610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136826516642912;op_tx=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:15:51.634729Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136826516642912;op_tx=11:TX_KIND_SCHEMA;min=1768047351846;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=No such table;tablet_id=9437184;tx_id=11; |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> GroupWriteTest::TwoTables [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> TColumnShardTestSchema::HotTiersAfterTtl >> 
TColumnShardTestSchema::DropWriteRace >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 15558468071763044477 2026-01-10T12:15:40.722724Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:15:40.722889Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:15:40.752391Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:15:40.752482Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:15:40.752591Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:15:40.752621Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:15:40.757053Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:15:40.757174Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:15:40.779913Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:15:40.780044Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:15:40.784719Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:15:40.784818Z 1 
00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:15:52.578274Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-10T12:15:52.578411Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:15:52.578509Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:15:52.617997Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2026-01-10T12:15:52.618129Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} >> GenericFederatedQuery::YdbFilterPushdown [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TColumnShardTestSchema::DropWriteRace [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2734, MsgBus: 23821 2026-01-10T12:11:55.520501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704540041743803:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:11:55.522133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:11:56.103427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:11:56.162549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:56.162637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:56.236856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:56.371477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593704540041743671:2081] 1768047115474123 != 1768047115474126 2026-01-10T12:11:56.377980Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.402579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:11:56.537240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:56.537264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:56.537271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:56.537379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:56.543830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:57.413465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:57.427032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:12:00.516112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593704540041743803:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:00.516194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:12:00.802434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561516581030:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.802462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561516581058:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.802542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.802808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704561516581061:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.802861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:00.807177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:00.842167Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593704561516581060:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:12:00.990858Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593704561516581113:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:12:01.298018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:01.449091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:02.997079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:05.192398Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Read conflict with concurrent transaction.;tx_id=3; 2026-01-10T12:12:05.207961Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:12:05.208346Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:12:05.208603Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7593704582991425750:2954], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [1:7593704578696458186:2954]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7593704582991425750:2954].{
: Error: Read conflict with concurrent transaction., code: 2001 } 2026-01-10T12:12:05.209227Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593704582991425744:2954], SessionActorId: [1:7593704578696458186:2954], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Read conflict with concurrent transaction., code: 2001 . sessionActorId=[1:7593704578696458186:2954]. 2026-01-10T12:12:05.209526Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZjQ1NDBjMWEtYzdlMjZkYjctMzM5N2MyLTNmZjIyMTA4, ActorId: [1:7593704578696458186:2954], ActorState: ExecuteState, LegacyTraceId: 01kekx48jcaqfsptjann4tgs4q, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7593704582991425745:2954] from: [1:7593704582991425744:2954] trace_id# 2026-01-10T12:12:05.209673Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593704582991425745:2954] TxId: 281474976715666. Ctx: { TraceId: 01kekx48jcaqfsptjann4tgs4q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQ1NDBjMWEtYzdlMjZkYjctMzM5N2MyLTNmZjIyMTA4, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Read conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-10T12:12:05.209964Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZjQ1NDBjMWEtYzdlMjZkYjctMzM5N2MyLTNmZjIyMTA4, ActorId: [1:7593704578696458186:2954], ActorState: ExecuteState, LegacyTraceId: 01kekx48jcaqfsptjann4tgs4q, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 13130, MsgBus: 9435 2026-01-10T12:12:07.183406Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704590202014783:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:07.183922Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:07.209275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:07.368421Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:07.368873Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:07.370600Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593704590202014759:2081] 1768047127179090 != 1768047127179093 2026-01-10T12:12:07.395052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:07.395162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:07.445808Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileS ... 
esolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.178900Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.179694Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.179760Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.179779Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.191312Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.191387Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.191409Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.193140Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.193205Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.193225Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.203867Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.203946Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.203968Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.204539Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.204603Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.204621Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.216043Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.216243Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.216269Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.228002Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.228206Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.228233Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.240369Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.240443Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.240466Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.252285Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.252362Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.252381Z node 17 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.264409Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.264486Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.264507Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.277360Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.277438Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.277463Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.289534Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.289607Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.289627Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.305671Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.305750Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.305772Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:15:47.387518Z node 17 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxaefh8bh71fyc1x680cz0", SessionId: ydb://session/3?node_id=17&id=NTdiOWFlZWEtNmQyYWU1ZDctNTVjMDg2NjItNzc1MDI2Nzc=, Slow query, duration: 14.355908s, status: 
STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 23538, MsgBus: 5797 2026-01-10T12:14:34.051774Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705221317545093:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:34.051830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:34.111269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:34.693199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:34.693328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:34.837363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:34.914464Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:34.925588Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705217022577653:2081] 1768047273999830 != 1768047273999833 2026-01-10T12:14:34.946089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:35.052865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:35.189935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:35.272960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:35.272980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:35.272989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:35.273059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:36.268270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:14:36.268647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:14:36.283083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:36.284352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:14:36.290199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047276338, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:14:36.291126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-10T12:14:36.291160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-10T12:14:36.291339Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7593705221317545472:2244] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-10T12:14:36.291723Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705217022577621:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:36.291836Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705217022577624:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:36.291908Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7593705217022577627:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-10T12:14:36.292237Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7593705221317545472:2244] Ack update: ack to# [1:7593705221317545304:2146], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-10T12:14:36.292406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:14:36.293141Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7593705225612512938:2289][/Root] Path was updated to new version: owner# [1:7593705225612512929:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:36.293373Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705225612512941:2290][/Root] Path was updated to new version: owner# [1:7593705225612512930:2285], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:36.299767Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7593705221317545446:2228][/Root] Path was updated to new version: owner# [1:7593705221317545238:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:39.056031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705221317545093:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:39.056107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:39.378335Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:39.381652Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/d9xi/0010c9/r3tmp/spilling-tmp-runner/node_1_2c2de533-ebc160de-7cd8a95b-afb810d7, actor: [1:7593705242792382280:2308] 2026-01-10T12:14:39.381867Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/d9xi/0010c9/r3tmp/spilling-tmp-runner 2026-01-10T12:14:39.385036Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705242792382287:2500][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7593705221317545238:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0110 12:14:39.394495283 245923 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:39.394687416 245923 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:39.408795Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekx8w99504k5tz58nf1djqy", Request has 18444976026430.142900s seconds to be completed E0110 12:14:39.429066326 245922 
dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:39.429228795 245922 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:39.416940Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705242792382311:2502][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593705221317545238:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:39.417112Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593705242792382312:2503][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7593705221317545238:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:14:39.423123Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekx8w99504k5tz58nf1djqy", Created new session, sessionId: ydb://session/3?node_id=1&id=ZmVjOGE4MmMtNTBmMTA1NS1iNzIxYjlhNi04NGVhOTAzMg==, workerId: [1:7593705242792382326:2330], database: /Root, longSession: 1, local sessions count: 1 2026-01-10T12:14:39.423334Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekx8w99504k5tz58nf1djqy 2026-01-10T12:14:39.423442Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:14:39.423469Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:14:39.423494Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-10T12:14:39.444327Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0110 12:14:39.454784355 245922 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:14:39.454914304 245922 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:14:39.458299Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-10T12:14:39.469217Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekx8zbca8v3bhtckp2epawk, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVjOGE4MmMtNTBmMTA1NS1iNzIxYjlhNi04NGVhOTAzMg==, ... table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 2026-01-10T12:15:52.592184Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [9:7593705555785944472:2339] TxId: 281474976710662. Ctx: { TraceId: 01kekxb6ccda6384j381zpt7yk, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjNkNjg0OWQtNWI3YzlmZC1mZmZhOGI5Yi00MzlkOTg0ZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-10T12:15:52.597148Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01kekxb6ccda6384j381zpt7yk, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjNkNjg0OWQtNWI3YzlmZC1mZmZhOGI5Yi00MzlkOTg0ZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705555785944476:2352] 2026-01-10T12:15:52.598103Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01kekxb6ccda6384j381zpt7yk, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjNkNjg0OWQtNWI3YzlmZC1mZmZhOGI5Yi00MzlkOTg0ZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7593705555785944477:2353] 2026-01-10T12:15:52.599065Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxb6ccda6384j381zpt7yk", Forwarded response to sender actor, requestId: 4, sender: [9:7593705555785944372:2338], selfId: [9:7593705534311107169:2265], source: [9:7593705555785944373:2339] 2026-01-10T12:15:52.599511Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NjNkNjg0OWQtNWI3YzlmZC1mZmZhOGI5Yi00MzlkOTg0ZQ==, workerId: [9:7593705555785944373:2339], local sessions count: 0 E0110 12:15:52.627122231 265100 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:15:52.627338306 265100 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:15:52.632395Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2026-01-10T12:15:53.641363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:53.674144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:53.674456Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:53.681982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:53.682261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:53.682505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:53.682638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:53.682735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:53.682876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:53.683010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:53.683174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:53.683296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:53.683398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:53.683604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:53.683735Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:53.683864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:53.712116Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:53.712404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:53.712461Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:53.712622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:53.712784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:53.712849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:53.712881Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:53.712965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:53.713010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:53.713040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:53.713074Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:53.713223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:53.713267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:53.713308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:53.713340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:53.713418Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:53.713461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:53.713489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:53.713507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:53.713558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:53.713605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:53.713629Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:53.713664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:53.713690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:53.713712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:53.713878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:53.713968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:53.713993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:53.714081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:53.714109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:53.714129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:53.714171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:53.714210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:53.714236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:53.714275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:15:53.714314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:15:53.714342Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:15:53.714437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:15:53.714489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n;tablet_id=9437184;tx_id=101;this=137111928494400;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:15:54.301464Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=137111928494400;op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137318089646528;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:15:54.301583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=137111928494400;op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137318089646528;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:15:54.301664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=137111928494400;op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1768047354603;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137318089646528;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2026-01-10T12:15:54.302159Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:15:54.302358Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047354603 at tablet 9437184, mediator 0 2026-01-10T12:15:54.302423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-10T12:15:54.302793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:54.302909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:54.302968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:15:54.303076Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:15:54.321343Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1768047354603;tx_id=101;;switch_optimizer=0;switch_accessors=0; 2026-01-10T12:15:54.321483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:15:54.321640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-10T12:15:54.321729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-10T12:15:54.322016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:15:54.344313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-10T12:15:54.377004Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2026-01-10T12:15:54.381594Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-10T12:15:54.381680Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6120;operation_id=1c8b70d4-ee1e11f0-b695afcc-83b9bc40;in_flight=1;size_in_flight=6120; 2026-01-10T12:15:54.397663Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8392;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-10T12:15:54.400127Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6120;event=data_write_finished;writing_id=1c8b70d4-ee1e11f0-b695afcc-83b9bc40; 2026-01-10T12:15:54.400539Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=78;data_size=59;sum=78;count=1; 2026-01-10T12:15:54.400616Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=174;data_size=171;sum=174;count=2;size_of_meta=112; 2026-01-10T12:15:54.400686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=254;data_size=251;sum=254;count=1;size_of_portion=192; 2026-01-10T12:15:54.401505Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:15:54.401695Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:15:54.414893Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:15:54.415160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: 
returning DoGetUsefulMetric zero; 2026-01-10T12:15:54.446763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=137111928846080;op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137318089480128;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:15:54.446866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=137111928846080;op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137318089480128;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:15:54.446925Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=137111928846080;op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1768047354611;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137318089480128;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2026-01-10T12:15:54.447286Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:15:54.447448Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047354611 at tablet 9437184, mediator 0 2026-01-10T12:15:54.447522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-10T12:15:54.447874Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-10T12:15:54.460766Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-10T12:15:54.461212Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047354612 at tablet 9437184, mediator 0 2026-01-10T12:15:54.461320Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] execute at tablet 9437184 2026-01-10T12:15:54.461667Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:88;progress_tx_id=102;lock_id=1;broken=0; 2026-01-10T12:15:54.480594Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] complete at tablet 9437184 2026-01-10T12:15:54.480738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=102;lock_id=1;broken=0; 2026-01-10T12:15:54.481002Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:15:54.481075Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system |89.8%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> Sharding::XXUsage >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> Sharding::XXUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:32.806301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:32.962802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:32.975248Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:32.975651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:32.975763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:33.469219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:33.469346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:33.475901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047329148948 != 1768047329148951 2026-01-10T12:15:33.488244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:33.543126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:33.660298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:34.017457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:34.037175Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:34.158087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:34.203358Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:15:34.203630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:34.255189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:34.255341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:34.257096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:34.257198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:34.257268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:34.257686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:34.257835Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:34.257941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:15:34.272673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:34.309359Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:34.309590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:34.309709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:15:34.309754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.309787Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:34.309830Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.310307Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:34.310456Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:34.310572Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.310620Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.310669Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:34.310710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.310773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:15:34.310940Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:34.311190Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:34.312196Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:34.314320Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.326194Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:34.326300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:34.453138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:15:34.460558Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:34.460641Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.462099Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.462158Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:34.462304Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:34.462569Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:15:34.462707Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:34.462949Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.463031Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:34.465688Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:34.466138Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.467799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:34.467851Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.468914Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:34.469006Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.470390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.470440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.470484Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:34.470676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:34.470730Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:34.470808Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.475347Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.477702Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:34.477895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:34.477965Z node 1 :TX_DATASHARD DEBUG: datashar ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:54.082612Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:54.085089Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:54.085136Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:54.085200Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:54.085273Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:54.085339Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:54.085437Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.086515Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:54.088046Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:54.088231Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:54.088296Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:54.098963Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:54.099128Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:54.099183Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-10T12:15:54.099224Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-10T12:15:54.100427Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:54.113962Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:54.277468Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:54.277550Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.277949Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:54.278011Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:54.278070Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:54.278285Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-10T12:15:54.278424Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:54.278692Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:54.279793Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:54.331242Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-10T12:15:54.331370Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:54.331416Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:54.331463Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.331558Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:54.331632Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-10T12:15:54.331731Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.333877Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-10T12:15:54.333969Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:54.342475Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:54.342608Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:54.342691Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:54.343596Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:54.343760Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:54.348730Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:54.354854Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:54.518086Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:54.521509Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:15:54.547287Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:54.650776Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-10T12:15:54.651154Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-10T12:15:54.651442Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047354651341 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:54.651599Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-10T12:15:54.664774Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:54.664875Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.758959Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-10T12:15:54.759252Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047354759144 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:54.759406Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-10T12:15:54.770330Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:54.770401Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:54.772236Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1193:2976], serverId# [4:1194:2977], sessionId# [0:0:0] 2026-01-10T12:15:54.778325Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2978], serverId# [4:1196:2979], sessionId# [0:0:0] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2026-01-10T12:15:42.404443Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705515579401847:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:42.412899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.507009Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705513720174372:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:42.509933Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.521724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.521886Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.919660Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:42.974198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:43.011833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:43.011977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:43.018331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:43.018539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:43.029377Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:15:43.029606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:43.051930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:43.184881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:43.187824Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705515579401797:2082] 1768047342373233 != 1768047342373236 2026-01-10T12:15:43.195177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.214081Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.435213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.516789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001641/r3tmp/yandexPmTNCS.tmp 2026-01-10T12:15:43.516832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001641/r3tmp/yandexPmTNCS.tmp 2026-01-10T12:15:43.517014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001641/r3tmp/yandexPmTNCS.tmp 2026-01-10T12:15:43.517121Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:43.521908Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.606717Z INFO: TTestServer started on Port 20957 GrpcPort 3625 PQClient connected to localhost:3625 2026-01-10T12:15:43.896463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:44.095757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:47.141905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705537054239775:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.142103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.142583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705537054239787:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.142645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705537054239788:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.142813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.147061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:47.160588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705537054239828:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.160782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.188260Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705537054239791:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:15:47.383142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705515579401847:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.383210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.494983Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705513720174372:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.495057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.532897Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705537054239881:3097] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:47.567455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:47.570091Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593705535195011158:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:47.571009Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705537054239904:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:47.572504Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDc4NTZmYWUtNzdiOGExZDEtMzc3ODllYTMtNjhiMTY2YjI=, ActorId: [1:7593705537054239772:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxb1dr6vg99f569kagg05d, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:47.572636Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTc4YjMzYmMtMzc5Mzg1OWMtYmFlMzdjNDYtNTVkMDNmNDI=, ActorId: [2:7593705535195011139:2306], ActorState: ExecuteState, LegacyTraceId: 01kekxb1g3fc73zp3rw4kybskv, ReplyQue ... Init] bootstrapping 6 [1:7593705567119012195:2468] 2026-01-10T12:15:54.446007Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7593705565259782730:2368] 2026-01-10T12:15:54.447762Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7593705565259782735:2367] 2026-01-10T12:15:54.447538Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7593705567119012195:2468] ===Query complete 2026-01-10T12:15:54.447840Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][5][StateInit] bootstrapping 5 [1:7593705567119012205:2466] 2026-01-10T12:15:54.450258Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][9][StateInit] bootstrapping 9 [2:7593705565259782741:2369] 2026-01-10T12:15:54.450202Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7593705567119012205:2466] 2026-01-10T12:15:54.452683Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7593705565259782741:2369] 2026-01-10T12:15:54.454629Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7593705565259782742:2369] 2026-01-10T12:15:54.456432Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7593705565259782746:2371] 2026-01-10T12:15:54.454431Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][10][StateInit] bootstrapping 10 [1:7593705567119012206:2466] 2026-01-10T12:15:54.456782Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7593705567119012206:2466] 
2026-01-10T12:15:54.457612Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037898][Partition][14][StateInit] bootstrapping 14 [1:7593705567119012219:2467] 2026-01-10T12:15:54.459733Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7593705565259782742:2369] 2026-01-10T12:15:54.459779Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7593705565259782746:2371] 2026-01-10T12:15:54.459735Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7593705567119012219:2467] 2026-01-10T12:15:54.461805Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037898][Partition][11][StateInit] bootstrapping 11 [1:7593705567119012220:2467] 2026-01-10T12:15:54.461989Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.463519Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.463642Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7593705565259782748:2371] 2026-01-10T12:15:54.464003Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7593705567119012220:2467] 2026-01-10T12:15:54.465921Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7593705565259782748:2371] 2026-01-10T12:15:54.467283Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.470796Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.470867Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.474292Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.474711Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:15:54.475163Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2026-01-10T12:15:54.485894Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012315:4062] connected; active server actors: 1 2026-01-10T12:15:54.486169Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 1 2026-01-10T12:15:54.486186Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location 
was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-10T12:15:54.486197Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 1 2026-01-10T12:15:54.486211Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-10T12:15:54.486221Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 1 2026-01-10T12:15:54.486232Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-10T12:15:54.486242Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 1 2026-01-10T12:15:54.486255Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 1 2026-01-10T12:15:54.486265Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 1 2026-01-10T12:15:54.486276Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 1 2026-01-10T12:15:54.486285Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 1 2026-01-10T12:15:54.486295Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 1 2026-01-10T12:15:54.486303Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 1 2026-01-10T12:15:54.486313Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 1 2026-01-10T12:15:54.486325Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 1 2026-01-10T12:15:54.487404Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012315:4062] disconnected. 
2026-01-10T12:15:54.485473Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705567119012306:4059]: Request location 2026-01-10T12:15:54.487449Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012315:4062] disconnected; active server actors: 1 2026-01-10T12:15:54.487044Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705567119012306:4059]: Got location 2026-01-10T12:15:54.487470Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012315:4062] disconnected no session 2026-01-10T12:15:54.488022Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012318:4065] connected; active server actors: 1 2026-01-10T12:15:54.487706Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705567119012316:4063]: Request location 2026-01-10T12:15:54.488312Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-10T12:15:54.488455Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705567119012316:4063]: Got location 2026-01-10T12:15:54.488332Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-10T12:15:54.488339Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-10T12:15:54.488697Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012318:4065] disconnected. 
2026-01-10T12:15:54.488714Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012318:4065] disconnected; active server actors: 1 2026-01-10T12:15:54.488726Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012318:4065] disconnected no session 2026-01-10T12:15:54.493072Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705567119012319:4066]: Request location 2026-01-10T12:15:54.493452Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705567119012321:4068] connected; active server actors: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 7622816676894411104 14946412977187855061 5994235360220560457 8758630922041774129 1368944412412600107 9710866484198428177 14949373331127065089 12944877536978537109 7640263046152009861 7816869795022199651 16783532189268828677 5384114379549043341 5543922001902845055 18421151893992952648 8048843628998877264 4780612034134044428 1285022703928868028 2679880553583238695 9715305820084995853 2572128553746973593 5448799595691788981 15644429289244567054 16124605756796536952 17191212556450584573 648318961548569789 11718300396152897958 14670261163465053486 14759859752771491188 17553835872874848074 610430720485316319 1440108869279352788 11441037447607944612 7096528473000810615 16348285700637306902 13609226885308993012 11873196038214612163 1019670286577660149 4416069496412843551 17060181261322344241 641477035096625861 8036709149873918507 16254652110267096251 15044112887236531126 14518868335532852374 14259295741045764469 12764673290169517091 6682124420208487070 4361557108080518273 13671921954068537932 16438091889553126561 164852255672563262 13736207131316203400 6976069558543548004 669473759330737082 14394291816222626545 1875886961556362614 12994769928568897476 6442355241701975783 10080687384765370810 4310474480129819589 10242666073635445894 2594702788311947641 10472545181188979118 12460356407677417951 12291554800312064424 6397482098880394808 11893526619431436047 6227904187639257666 2854795992000786632 14920233779230571909 8563941021944196161 17416119326203967126 12201368420570403896 5104400228247410742 13141324473855717135 12269139086638413555 11022396341700543369 8958331862885417399 3874580888746306852 301698433457694209 11806979466381907949 610524638323738997 7935650571047159325 17125297801995737567 7629292217403487078 9122611553043504588 3890597110610234140 5165752750014563968 8799488914219463608 8261716645286790325 |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails >> TTopicApiDescribes::DescribeConsumer [GOOD] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> TBackupTests::BackupUuidColumn[Raw] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex |89.9%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:54.757007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:54.757123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:54.757174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:54.757211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:54.757250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:54.757283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:54.757363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:54.757436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:54.758335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:54.758666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:54.843621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:54.843679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:54.855528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:54.855661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:54.855826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:54.864903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-10T12:15:54.865139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:54.865865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:54.866115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:54.868505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:54.868690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:54.869925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:54.869984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:54.870060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:54.870107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:54.870172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:54.870441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:55.020542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.021984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.022039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:15:55.022102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.022196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.022279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:55.022340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
TTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30A9CAAF-B9B1-4114-99A6-DCD4FC832BDF amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2026-01-10T12:15:56.256778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:56.256918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:15:56.258528Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:892:2850], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2026-01-10T12:15:56.258805Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:891:2848] 2026-01-10T12:15:56.258946Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:892:2850], sender# [1:891:2848], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-10T12:15:56.259487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:56.259631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:56.259704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:15:56.259764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:15:56.259840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:15:56.259978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:9826 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FD8FCBE6-1BF6-4F90-83F6-3B3BFA55668C amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-10T12:15:56.264834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2026-01-10T12:15:56.267015Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:892:2850], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-10T12:15:56.267095Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:892:2850], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:15:56.267273Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:891:2848], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:15:56.289063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 4294970014 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.289142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:15:56.289361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 4294970014 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.289479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 737 RawX2: 4294970014 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.289591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:56.289798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:56.290293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970012 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.290332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:15:56.290456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970012 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.290552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 735 RawX2: 4294970012 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:56.290677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:56.290715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:56.290780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:15:56.290860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:15:56.290902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:15:56.291030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:56.297031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:56.297536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:56.298051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:56.298130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:15:56.298302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:56.298349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:56.298402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:56.298444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:56.298491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2026-01-10T12:15:56.298618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:785:2752] message: TxId: 102 2026-01-10T12:15:56.298685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:56.298748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:15:56.298802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:15:56.298993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:15:56.301502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:15:56.301567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:870:2829] TestWaitNotification: OK eventTxId 102 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:36.425515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:36.555913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:36.567015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:36.567605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:36.567731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:36.978409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:36.978585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:36.986608Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047332968410 != 1768047332968413 2026-01-10T12:15:37.006482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:37.057583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:37.154878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:37.669073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:37.701525Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:37.834577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:37.953539Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:15:37.953880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:38.066388Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:15:38.066678Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:38.090033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:38.090302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:38.102380Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:38.102562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:38.102645Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:38.103179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:38.103428Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:38.103545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:15:38.104270Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:38.104405Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:38.106061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:15:38.106151Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:15:38.106221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:15:38.106559Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:38.106664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:38.106742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:15:38.120781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:38.200742Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:38.201089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:38.201292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:15:38.201367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:38.201419Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:38.201465Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:38.201865Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:38.201912Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:15:38.201983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:38.202052Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:15:38.202079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:38.202153Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:15:38.202191Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:38.202649Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:38.202770Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:38.203023Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:38.203071Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:38.203147Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:38.203194Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:38.203245Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:15:38.203309Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:15:38.203463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:15:38.203550Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:38.203578Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:38.203619Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:15:38.203671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:38.204277Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:38.204607Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:38.204729Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:38.205259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:891:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-10T12:15:38.205493Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:15:38.205729Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:38.205795Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-10T12:15:38.207894Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:38.207992Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:38.219009Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:38.219156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:38.219841Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:15:38.219925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:38.362531Z node 1 :TX_DATASHARD ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:55.297279Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:55.298902Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:55.298961Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:55.299019Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:55.299099Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:55.299163Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:55.299261Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:55.300607Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:55.302197Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:55.302364Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:55.302441Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:55.314485Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:55.314708Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:55.314758Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-10T12:15:55.314802Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-10T12:15:55.315858Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:55.330118Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:55.501056Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:55.501144Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:55.501585Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:55.501649Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:55.501714Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:55.501934Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-10T12:15:55.502089Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:55.502363Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:55.503397Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:55.551100Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-10T12:15:55.551272Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:55.551326Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:55.551384Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:55.551486Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:55.551558Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-10T12:15:55.551681Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:55.554301Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-10T12:15:55.554402Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:55.567329Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:55.567457Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:55.567539Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:55.568518Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:55.568692Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:55.573896Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:55.581494Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:55.764185Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:55.767837Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:15:55.795868Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:55.882876Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-10T12:15:55.883349Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-10T12:15:55.883687Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047355883565 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:55.883893Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-10T12:15:55.895329Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:55.895440Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:55.992068Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-10T12:15:55.992584Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047355992454 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:55.992772Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-10T12:15:56.004952Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:56.005045Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:56.007363Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1193:2976], serverId# [4:1194:2977], sessionId# [0:0:0] 2026-01-10T12:15:56.014529Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2978], serverId# [4:1196:2979], sessionId# [0:0:0] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin >> TBackupTests::BackupUuidColumn[Zstd] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2026-01-10T12:15:42.420632Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705514408345059:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:42.422490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.539717Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.573043Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705511886834358:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:42.573117Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.629120Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.905981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:42.935363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:43.096645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:43.096727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:43.109532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:43.109601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:43.114826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:43.209847Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:15:43.335344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:43.338289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:43.348493Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.356490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.444699Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.614931Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.920824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001642/r3tmp/yandexbmeQDV.tmp 2026-01-10T12:15:43.920854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001642/r3tmp/yandexbmeQDV.tmp 2026-01-10T12:15:43.920994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001642/r3tmp/yandexbmeQDV.tmp 2026-01-10T12:15:43.921107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:43.982409Z INFO: TTestServer started on Port 11943 GrpcPort 10402 PQClient connected to localhost:10402 2026-01-10T12:15:44.546969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:44.729357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:47.403572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705514408345059:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.403651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.537505Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705533361671232:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.537577Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705533361671221:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.537699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.538219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705533361671236:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.538263Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:47.543212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:47.573311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705511886834358:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.573376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.582423Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705533361671235:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:15:47.677237Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705533361671264:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:48.119836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:48.119328Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593705533361671278:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:48.120499Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MmI1YTlmZjUtM2YzZTg4M2MtNjlkN2E5ZmQtZTdjOGM4ZWI=, ActorId: [2:7593705533361671219:2306], ActorState: ExecuteState, LegacyTraceId: 01kekxb1te36rn1ewghh9e0e2j, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:48.121417Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705535883183002:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:48.121877Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzFjNWEzZDMtYjVmZjE4NS1lYzAxMWQyMC05ZDhjYjY0YQ==, ActorId: [1:7593705535883182974:2332], ActorState: ExecuteState, LegacyTraceId: 01kekxb1x9fzdgc4zw2taqwxy6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:48.122802Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At functio ... ats { min_last_write_time { seconds: 1768047355 nanos: 218000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2026-01-10T12:15:56.089089Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-10T12:15:56.089218Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2026-01-10T12:15:56.089806Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705574537890626:2522]: Request location 2026-01-10T12:15:56.090309Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705574537890628:2523] connected; active server actors: 1 2026-01-10T12:15:56.090502Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-10T12:15:56.090519Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-10T12:15:56.090530Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-10T12:15:56.090541Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-10T12:15:56.090888Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705574537890626:2522]: Got location 2026-01-10T12:15:56.090551Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-10T12:15:56.090561Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition 
location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-01-10T12:15:56.090570Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-10T12:15:56.090604Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-10T12:15:56.090615Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-01-10T12:15:56.090624Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-10T12:15:56.090637Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-10T12:15:56.090649Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-01-10T12:15:56.090661Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-01-10T12:15:56.090677Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-10T12:15:56.090689Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2026-01-10T12:15:56.091203Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705574537890628:2523] disconnected. 
2026-01-10T12:15:56.091226Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705574537890628:2523] disconnected; active server actors: 1 2026-01-10T12:15:56.091239Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705574537890628:2523] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1768047355060 tx_id: 281474976710670 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe topic with no stats or location 2026-01-10T12:15:56.096602Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-10T12:15:56.096688Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1768047355060 tx_id: 281474976710670 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { 
active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe bad topic 2026-01-10T12:15:56.101669Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-10T12:15:56.101754Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2026-01-10T12:15:41.748072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705508891531968:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:41.751519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:41.835491Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705510055519400:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:41.835578Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:41.874086Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:41.921831Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:41.922155Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:42.214763Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:15:42.233861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:42.288848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:42.288991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:42.289264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:42.289350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:42.312901Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:15:42.313204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:42.325983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:42.491382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:42.557349Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:42.569031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:42.784843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:42.836248Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:42.852011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001646/r3tmp/yandexMS9xWK.tmp 2026-01-10T12:15:42.852068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001646/r3tmp/yandexMS9xWK.tmp 2026-01-10T12:15:42.852275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001646/r3tmp/yandexMS9xWK.tmp 2026-01-10T12:15:42.852386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:42.948591Z INFO: TTestServer started on Port 20172 GrpcPort 62829 PQClient connected to localhost:62829 2026-01-10T12:15:43.282623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:43.496481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:46.386818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705531530356078:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.386915Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705531530356089:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.386988Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.388960Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705531530356093:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.406576Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.411928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:46.438702Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705531530356092:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:15:46.542303Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705531530356121:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:46.822472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705508891531968:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:46.822705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:46.835014Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705510055519400:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:46.835101Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:46.841769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:46.843923Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705530366369905:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:46.846770Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDYxODU2NzktZjMwMjQ3OGMtNTk3MWMyMjktZDdmZDgyOTE=, ActorId: [1:7593705530366369872:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxb0rr5mhpvhdd84cr1rg7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:46.849124Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:15:46.852686Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593705531530356128:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:46.853347Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTEwMDQwYjYtYzg4MThjNTUtYjNmNmJhNmMtNWMyNTBkMDk=, ActorId: [2:7593705531 ... nanos: 119000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1768047355 nanos: 110000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1768047355 nanos: 126000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1768047355 nanos: 103000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1768047355 nanos: 118000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2026-01-10T12:15:55.935576Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-10T12:15:55.935672Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2026-01-10T12:15:55.936533Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705569021077770:2539]: Request location 2026-01-10T12:15:55.937071Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705569021077772:2540] connected; active server actors: 1 2026-01-10T12:15:55.937349Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-10T12:15:55.937370Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-10T12:15:55.937381Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-10T12:15:55.937395Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-10T12:15:55.937405Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-10T12:15:55.937416Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-01-10T12:15:55.937425Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to 
response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-10T12:15:55.937456Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-10T12:15:55.937472Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-01-10T12:15:55.937481Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-10T12:15:55.937491Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-10T12:15:55.937501Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-01-10T12:15:55.937510Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-01-10T12:15:55.937519Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-10T12:15:55.937529Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2026-01-10T12:15:55.937740Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705569021077770:2539]: Got location 2026-01-10T12:15:55.938104Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705569021077772:2540] disconnected. 
2026-01-10T12:15:55.938127Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705569021077772:2540] disconnected; active server actors: 1 2026-01-10T12:15:55.938139Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705569021077772:2540] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1768047354843 tx_id: 281474976715671 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-01-10T12:15:55.940727Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-10T12:15:55.940797Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1768047354843 tx_id: 281474976715671 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } 
} } } 2026-01-10T12:15:55.945000Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-10T12:15:55.945087Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:26.539418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:26.669696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:26.679752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:26.680460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:26.680582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:27.286033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:27.286173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:27.293657Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047322091455 != 1768047322091458 2026-01-10T12:15:27.306960Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:27.361338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:27.475258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:27.888117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:27.905427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:28.021027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:28.061714Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:15:28.062029Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:28.119891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:28.120028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:28.122359Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:28.122458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:28.122537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:28.122929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:28.123073Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:28.123163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:15:28.136686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:28.232519Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:28.232780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:28.232922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:15:28.232967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:28.233009Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:28.233047Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.233613Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:28.233746Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:28.233861Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.233922Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:28.233964Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:28.234036Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.234112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:15:28.234293Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:28.234558Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:28.234680Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:28.236980Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:28.247719Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:28.247833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:28.394233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:15:28.400831Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:28.400935Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.402390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.402469Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:28.402518Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:28.402844Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:15:28.403006Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:28.403227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:28.403306Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:28.405647Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:28.406300Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:28.408128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:28.408184Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.409100Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:28.409198Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.410570Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:28.410640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:28.410687Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:28.410760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:28.410820Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:28.410906Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:28.421938Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:28.423745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:28.423937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:28.424008Z node 1 :TX_DATASHARD DEBUG: datashar ... 38], version# 2, step# 2500, txId# 281474976715658, at tablet# 72075186224037888 2026-01-10T12:15:56.569127Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:56.615140Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-10T12:15:56.615272Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:56.615316Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:56.615384Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:56.615454Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:56.615539Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-10T12:15:56.615652Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:56.618098Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-10T12:15:56.618193Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:56.625851Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:56.625936Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:56.626071Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:56.626805Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:56.626947Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:56.631045Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:56.640755Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:56.800327Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:56.803815Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:15:56.832711Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:56.917581Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2957], serverId# [4:1167:2958], sessionId# [0:0:0] 2026-01-10T12:15:56.917995Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-10T12:15:56.918218Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047356918111 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:56.918492Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-10T12:15:56.929628Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-10T12:15:56.929751Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:56.963192Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:15:56.966152Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:56.966537Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2026-01-10T12:15:56.966650Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2026-01-10T12:15:56.966710Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2026-01-10T12:15:56.978051Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:57.093672Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715662 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:57.093769Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:57.094091Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:57.094160Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:57.094248Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715662] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:57.094647Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 3500:281474976715662 keys extracted: 0 2026-01-10T12:15:57.094830Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:57.095286Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:57.095407Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2026-01-10T12:15:57.096722Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 3, step# 3500, txId# 281474976715662, at tablet# 72075186224037888 2026-01-10T12:15:57.096887Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 3500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:15:57.097458Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:57.100880Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2026-01-10T12:15:57.101015Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:57.102483Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:57.102587Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-10T12:15:57.102759Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:57.102837Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-01-10T12:15:57.103005Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-10T12:15:57.103084Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:57.106100Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 
281474976715662 datashard 72075186224037888 state Ready 2026-01-10T12:15:57.106233Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:57.112874Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1209:2995], serverId# [4:1210:2996], sessionId# [0:0:0] 2026-01-10T12:15:57.131469Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1212:2998], serverId# [4:1213:2999], sessionId# [0:0:0] |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> Channels20::CaIntegrationTrivial [GOOD] >> Channels20::CaIntegrationAgg >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> TBackupTests::BackupUuidColumn[Raw] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:38.283720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:38.582659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:38.599840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:38.600354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:38.600459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:39.463763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:39.463900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:39.470645Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047333825962 != 1768047333825965 2026-01-10T12:15:39.490686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:39.540102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:39.647457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:39.969812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:39.987397Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:40.107234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:40.176827Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-10T12:15:40.177129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:40.242851Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-01-10T12:15:40.243083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:40.262776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:40.262954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:40.268934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:40.269058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:40.269156Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:40.269548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:40.269737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:40.269828Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-10T12:15:40.270384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:40.270492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:40.273564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:15:40.273642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:15:40.273698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:15:40.274005Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:40.274116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:40.274210Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-10T12:15:40.288874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:40.339798Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:40.340033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:40.340444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-10T12:15:40.340514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:40.340564Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:40.340598Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:40.340928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:40.340975Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:15:40.341044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:40.341113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-10T12:15:40.341139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:40.341182Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:15:40.341231Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-10T12:15:40.341679Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:40.341785Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:40.342059Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:40.342103Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:40.342150Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:40.342218Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:40.342273Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:15:40.342337Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:15:40.342492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:901:2776], sessionId# [0:0:0] 2026-01-10T12:15:40.342563Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:15:40.342592Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:40.342619Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:15:40.342659Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:40.343133Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:40.343400Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:40.343505Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:40.343987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:891:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-10T12:15:40.344218Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:15:40.344393Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-10T12:15:40.344457Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-10T12:15:40.346597Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:40.346685Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:40.358500Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:40.358601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:40.359163Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:15:40.359222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:40.495038Z node 1 :TX_DATASHARD ... E TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:57.325853Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:57.326277Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:57.327878Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:57.327945Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:57.330990Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-01-10T12:15:57.331043Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:57.331567Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:57.331667Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:57.333006Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-01-10T12:15:57.333071Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:57.333954Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:57.334014Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:57.334104Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:57.334188Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:57.334259Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:57.334355Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:57.336614Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain 
[OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:57.336741Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:57.337137Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:15:57.337196Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:15:57.337239Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:15:57.337307Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:57.337362Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:57.337435Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:57.341894Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:57.342136Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:15:57.342221Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:15:57.342914Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:57.343398Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-10T12:15:57.343460Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:15:57.355445Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:57.355623Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1009:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:57.355723Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:57.356421Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:57.356635Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:57.362148Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:57.370308Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:57.370463Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:57.490555Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:57.490712Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:15:57.493875Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1013:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:15:57.530883Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:57.621641Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2925], serverId# [4:1140:2926], sessionId# [0:0:0] 2026-01-10T12:15:57.622123Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-10T12:15:57.622450Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1768047357622347 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:57.622633Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-10T12:15:57.633871Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:57.633976Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:57.710076Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-10T12:15:57.710449Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1768047357710325 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:57.710652Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1768047357710325 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-10T12:15:57.710778Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-10T12:15:57.722025Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-10T12:15:57.722115Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:15:57.727084Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 72075186224037889, clientId# [4:1179:2957], serverId# [4:1180:2958], sessionId# [0:0:0] 2026-01-10T12:15:57.735586Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1181:2959], serverId# [4:1182:2960], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2026-01-10T12:15:41.998565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705510858641768:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:41.998633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.077652Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705511745805569:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:42.077703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:42.076416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:42.076917Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.178798Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:15:42.567357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:42.580463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:42.624542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:42.624640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:42.626717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:42.626807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:42.694234Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:15:42.694665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:42.695470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:42.785871Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:43.032182Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.033906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.037111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001643/r3tmp/yandexUWRjI0.tmp 2026-01-10T12:15:43.037171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001643/r3tmp/yandexUWRjI0.tmp 2026-01-10T12:15:43.037354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001643/r3tmp/yandexUWRjI0.tmp 2026-01-10T12:15:43.037467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:43.092169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:43.116378Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:43.196699Z INFO: TTestServer started on Port 28962 GrpcPort 6820 PQClient connected to localhost:6820 2026-01-10T12:15:43.439738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:43.607670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:46.761460Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705528925675122:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.762487Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.764342Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705528925675146:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.764445Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705528925675147:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.764638Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.770286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705532333479630:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.770384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705532333479612:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.770482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.772176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705532333479635:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.772268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:46.774573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:46.799204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705532333479634:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:15:46.807252Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705528925675158:2176] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:15:46.808302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2026-01-10T12:15:46.808543Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705528925675157:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:15:46.886834Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705532333479733:3040] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:46.911263Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705528925675185:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:47.079896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705511745805569:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.079967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.217137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705510858641768:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:47.217438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:47.243214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:47.258391Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593705528925675192:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:47.261924Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705532333479743:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:15:47.261799Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjZkYjQxYy1iOTgwYzJiOC0zOTIwMWM5OC00MDc1NDQ3NA==, ActorId: [2:7593705528925675114:2303], ActorState: ExecuteState, LegacyTraceId: 01kekxb1262kwcxfgyq3vj084x, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:47.264976Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWY1ZDk3MmUtYjgxZDQyODUtNmEwMTg4ZDQtNGM0N2VlOTE=, ActorId: [1:7593705532333479601:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxb12g4sapcekm3whyengx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:15:47.297520Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:15:47.300341Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:15:47.334688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:47.480733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593705536628447502:3390] === CheckClustersList. Ok >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] |89.9%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:58.109130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:58.109263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.109300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:58.109335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:58.109393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:58.109424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:58.109503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.109584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:58.110472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:58.110807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:58.195986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:58.196091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:58.212249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:58.212356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:58.212490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:58.220911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:58.221143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:58.221702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:58.221881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:58.223898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.224059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:58.225206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:58.225281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.225352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:58.225389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:58.225433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:58.225632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:58.353944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355336Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.355930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.356004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.356113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.356201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd: 72057594046678944 2026-01-10T12:15:59.359955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:15:59.360094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:15:59.360259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:15:59.373562Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:820:2789], attempt# 0 2026-01-10T12:15:59.397253Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:820:2789], sender# [1:819:2787] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:15:59.405087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:59.405158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:28314 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6BA86312-38D2-49A5-B942-22E5C92F1594 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-10T12:15:59.405502Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.405562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:15:59.405660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.405717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:15:59.406514Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:15:59.409742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:59.409876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:59.409926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:15:59.409974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:15:59.410030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:15:59.410143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:28314 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94AF3EEE-483E-4885-BEC2-FBE13325DEED amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-10T12:15:59.414017Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-10T12:15:59.414169Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:819:2787] 2026-01-10T12:15:59.414299Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:820:2789], sender# [1:819:2787], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-10T12:15:59.415019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28314 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50591ED1-4DE5-496E-8076-34CB64F69AF2 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2026-01-10T12:15:59.421318Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2026-01-10T12:15:59.421391Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:820:2789], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:15:59.421572Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:819:2787], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:15:59.435288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:15:59.435351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:15:59.435540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:15:59.435693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:15:59.435771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:59.435808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.435849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:15:59.435891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:15:59.436127Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:59.440939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.441351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.441408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:15:59.441507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:59.441546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.441576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:59.441597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.441623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:15:59.441670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:738:2716] message: TxId: 102 2026-01-10T12:15:59.441705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.441733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:15:59.441783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:15:59.441907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:15:59.445026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:15:59.445079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:803:2773] TestWaitNotification: OK eventTxId 102 >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:58.338317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:58.338416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.338453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:58.338487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:58.338524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:58.338559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:58.338609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.338706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:58.339627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:58.339917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:58.425166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:58.425218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:58.437122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:58.437206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:58.437315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:58.451596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:58.451860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:58.452617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:58.452879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:58.455607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.455783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:58.457059Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:58.457132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.457226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:58.457289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:58.457357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:58.457616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:58.605734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.608794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.608931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:58.609726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd: 72057594046678944 2026-01-10T12:15:59.590858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:15:59.590990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:15:59.591163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:15:59.602765Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:828:2797], attempt# 0 2026-01-10T12:15:59.620141Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:828:2797], sender# [1:827:2796] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:15:59.625467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:59.625549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:15:59.625881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.625973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:15:59.626781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.626873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:21863 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EF27652F-5298-4045-9674-EA7BC8AD2AF1 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:15:59.627974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:59.628229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:15:59.628302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:15:59.628342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:15:59.628378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:15:59.628453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:15:59.628626Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:21863 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1827A226-7AF4-4E6E-ADC9-E3746A0517E9 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-10T12:15:59.635695Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-10T12:15:59.635846Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:827:2796] 2026-01-10T12:15:59.635980Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:828:2797], sender# [1:827:2796], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-10T12:15:59.636628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:21863 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DEFF2AA8-2C61-4D96-88AC-E15A90755FC4 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD 
S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2026-01-10T12:15:59.639838Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-10T12:15:59.639908Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:828:2797], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:15:59.640118Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:827:2796], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:15:59.649991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:59.650058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:15:59.650270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:59.650412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:15:59.650519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:59.650561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.650618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:15:59.650690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:15:59.650894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:59.652886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-10T12:15:59.653136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.653233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:15:59.653327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:59.653360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.653403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:15:59.653431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.653472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:15:59.653525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:15:59.653561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:15:59.653590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:15:59.653615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:15:59.653719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:15:59.655696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:15:59.655757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:812:2782] TestWaitNotification: OK eventTxId 102 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:58.990489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, 
compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:58.990625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.990684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:58.990731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:58.990771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:58.990806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:58.990862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.990948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:58.991946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:58.992300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:59.080461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:59.080532Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:59.098915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:59.099037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:59.099203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:59.109039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:59.109264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:59.110043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:59.110255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:59.112730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.112900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:59.113988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:59.114088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.114154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:59.114199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:59.114262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:59.114535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.266592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.269998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.270940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.271008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:15:59.271082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.271158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2057594046678944 2026-01-10T12:16:00.255183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:16:00.255312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:16:00.255457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:00.272413Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:820:2789], attempt# 0 2026-01-10T12:16:00.295575Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:820:2789], sender# [1:819:2787] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:16:00.298999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:00.299073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:16:00.299503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:00.299567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:16:00.300147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.300227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:16:00.301275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:4559 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1B0AC2CA-EA68-49EA-AF19-DA5242937FAA 
2026-01-10T12:16:00.301429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:00.301507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:16:00.301554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:16:00.301608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:16:00.301705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:16:00.305304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-10T12:16:00.309231Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:4559 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 649D2D3A-4548-42B0-AEA6-393E78CCED0F amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-10T12:16:00.314381Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-10T12:16:00.314492Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:819:2787] 2026-01-10T12:16:00.314756Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:820:2789], sender# [1:819:2787], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4559 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2A365BCF-9431-4BF8-902A-F243E2B31CC7 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2026-01-10T12:16:00.317737Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: 
[Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:820:2789], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2026-01-10T12:16:00.317851Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:820:2789], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:16:00.325260Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:819:2787], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:00.328751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:16:00.328820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:00.329029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:16:00.329177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-10T12:16:00.329306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:00.329349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.329393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:00.329438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:16:00.329690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:00.332120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.332322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at 
schemeshard: 72057594046678944 2026-01-10T12:16:00.332375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:00.332485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.332523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.332582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.332617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.332658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:16:00.332725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2720] message: TxId: 102 2026-01-10T12:16:00.332768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.332805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:00.332841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:00.333006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:00.335522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:00.335595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:804:2774] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:59.009586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:59.009666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:59.009697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:59.009731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:59.009763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:59.009791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:59.009831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:59.009891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:59.010633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:59.010883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:59.093140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:59.093216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:59.106044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:59.106168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:59.106345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:59.118810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:59.119000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:59.119643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:59.119825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:59.122468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.122608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:59.123533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:59.123582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.123637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:59.123675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:59.123724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-10T12:15:59.123906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.256992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.258971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.259070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.259148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.259229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.259311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2057594046678944 2026-01-10T12:16:00.263850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:16:00.263974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:16:00.264173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:00.280358Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:828:2797], attempt# 0 2026-01-10T12:16:00.303654Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:828:2797], sender# [1:827:2796] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:16:00.309017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:00.309074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:16:00.309331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:00.309398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:16:00.309948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.310006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:16:00.310864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:00.310978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:00.311030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:16:00.311071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:16:00.311115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:16:00.311203Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3611 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6EA9A60F-01D6-40C9-962E-EE81B347A325 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-10T12:16:00.314822Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2026-01-10T12:16:00.317286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3611 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 596AE360-A4A3-41D5-8E78-7B99F4686584 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-10T12:16:00.321311Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-10T12:16:00.321439Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:827:2796] 2026-01-10T12:16:00.321640Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:828:2797], sender# [1:827:2796], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:3611 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A62B60D0-58B6-4C51-94F1-35833941B027 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-10T12:16:00.326442Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:828:2797], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-10T12:16:00.326509Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:828:2797], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:16:00.326713Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:827:2796], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:00.348591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.348664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:00.348842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.348975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.349062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:00.349117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.349174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:00.349228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:16:00.349393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:00.351933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.352347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.352418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:00.352531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.352575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.352615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.352652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 
ready parts: 1/1 2026-01-10T12:16:00.352707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:16:00.352788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:16:00.352865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.352909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:00.352948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:00.353084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:00.356798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:00.356902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:812:2782] TestWaitNotification: OK eventTxId 102 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:59.361254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:59.361360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:59.361407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:59.361448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:59.361492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:59.361526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-10T12:15:59.361590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:59.361665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:59.362737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:59.363066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:59.465751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:59.465820Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:59.482948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:59.483061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:59.483224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:59.492230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:59.492480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:59.493223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:59.493456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:59.496402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.496578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:59.497856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:15:59.497940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:59.498023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:59.498076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:59.498157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:59.498417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.672901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.673971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.674995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.675100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.675190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.675279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tipart# 0, uploadId# (empty maybe) 2026-01-10T12:16:00.898149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:00.898303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:00.898357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:16:00.898402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:16:00.898448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:16:00.898557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:16:00.898956Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:897:2853], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:24117 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 493A16A7-9F81-4669-9AA8-2AB5E3431279 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:16:00.906734Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:892:2850], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2026-01-10T12:16:00.907049Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:891:2848] 2026-01-10T12:16:00.907155Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:892:2850], sender# [1:891:2848], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:16:00.913606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:24117 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 865F1F80-35C1-4977-89CA-4D300A939E01 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 
2026-01-10T12:16:00.920886Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:892:2850], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-10T12:16:00.920962Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:892:2850], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:16:00.921143Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:891:2848], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:00.946480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970012 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.946557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:00.946755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 4294970012 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.946911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 735 RawX2: 4294970012 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.947026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:00.947168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:00.947728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 4294970014 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.947771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:16:00.947897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 737 RawX2: 4294970014 } Origin: 
72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.948041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 737 RawX2: 4294970014 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-10T12:16:00.948135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:00.948178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.948245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:00.948385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:16:00.948424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:16:00.948576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:00.952739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.953285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.953725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.953787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:00.953924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.953980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.954031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:00.954069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.954123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2026-01-10T12:16:00.954209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:785:2752] message: TxId: 102 2026-01-10T12:16:00.954262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:00.954303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:00.954346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:00.954519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:16:00.957135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:00.957191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:870:2829] TestWaitNotification: OK eventTxId 102 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::EraseRowsShouldSuccess >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system 
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> Channels20::CaIntegrationAgg [GOOD] >> Channels20::CaIntegrationIc >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:05.041286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:05.155830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:05.164135Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:05.164607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:05.164719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:05.527703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:05.527852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:05.534646Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047362421920 != 1768047362421923 2026-01-10T12:16:05.548498Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:05.598659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:05.702602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:06.004015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:06.019053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:06.123368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:06.161335Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:06.161686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:06.201014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:06.201223Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:06.203271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:06.203399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:06.203465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:06.203890Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:06.204064Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:06.204227Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:06.215242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:06.245403Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:06.245669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:06.245806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:06.245850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:06.245880Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:06.245912Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.246303Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:06.246410Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:06.246502Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.246535Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:06.246588Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:06.246639Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.246699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:06.246859Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:06.247182Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:06.247308Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:06.249685Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:06.260598Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:06.260738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:06.379256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:06.384925Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:06.385028Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.386635Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.386713Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:06.386788Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:06.387144Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:06.387291Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:06.387475Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.387556Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:06.389452Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:06.389875Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:06.391174Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:06.391239Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.392182Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:06.392276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.393640Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.393683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:06.393722Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:06.393793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:06.393857Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:06.393944Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.397717Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:06.399893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:06.400142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:06.400212Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:11.708577Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:11.708874Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:11.709018Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:11.709434Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:11.709505Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:11.709999Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:11.710431Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:11.712258Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:11.712313Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:11.713111Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:11.713204Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:11.714335Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:11.714380Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:11.714433Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:11.714501Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:11.714559Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-10T12:16:11.714650Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:11.715524Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:11.717418Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:11.717489Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:11.718294Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:11.727419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:11.727553Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:11.727666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:11.728767Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:11.729123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:11.733692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:11.739162Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:11.858221Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:11.861359Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:16:11.896629Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:11.992296Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:11.992764Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:11.992953Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-10T12:16:12.004440Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.008417Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:12.009313Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:12.020423Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:12.020507Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.020758Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:12.020803Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:12.021075Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.021118Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.021168Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:12.021227Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.021304Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:12.022156Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:12.022481Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:12.022662Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.022701Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.022746Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:12.022951Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.023029Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.023547Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-10T12:16:12.023775Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:12.023893Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-10T12:16:12.023940Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-10T12:16:12.025795Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:12.025850Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-10T12:16:12.026000Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.026031Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.026065Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:12.026181Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.026229Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.026275Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:04.682521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:04.807989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:04.818028Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:04.818500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:04.818612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:05.221003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:05.221160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:05.228380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047361643924 != 1768047361643927 2026-01-10T12:16:05.241683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:05.289274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:05.378836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:05.691492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:05.706241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:05.811906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:05.853913Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:05.854228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.904125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.904294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.905972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:05.906054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:05.906111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:05.906448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.906620Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.906731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:05.917471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.960561Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:05.960808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.960978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:05.961040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.961103Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:05.961146Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.961670Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:05.961805Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:05.961932Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.961985Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.962026Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:05.962079Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.962161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:05.962348Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:05.962617Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:05.962728Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:05.965108Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:05.976021Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:05.976179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:06.097495Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:06.103683Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:06.103781Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.105369Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.105448Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:06.105515Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:06.105818Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:06.105980Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:06.106176Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.106261Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:06.115801Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:06.116434Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:06.118339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:06.118395Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.119250Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:06.119344Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.120663Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.120712Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:06.120756Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:06.120835Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:06.120896Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:06.121000Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.125456Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:06.127243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:06.127423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:06.127483Z node 1 :TX_DATASHARD DEBUG: datashar ... ain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-10T12:16:12.380408Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:12.380555Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:16:12.418147Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1452:3222] 2026-01-10T12:16:12.418377Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:12.428152Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:12.428274Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:12.429760Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-10T12:16:12.429830Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-10T12:16:12.429895Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-10T12:16:12.430240Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:12.430389Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:12.430457Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [2:1468:3222] in generation 1 2026-01-10T12:16:12.451566Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:12.451653Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2026-01-10T12:16:12.451755Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:12.451831Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037894, actorId: [2:1470:3232] 2026-01-10T12:16:12.451865Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-10T12:16:12.451900Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2026-01-10T12:16:12.451947Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:16:12.452352Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2026-01-10T12:16:12.452444Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2026-01-10T12:16:12.452560Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-10T12:16:12.452598Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.452633Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2026-01-10T12:16:12.452670Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-10T12:16:12.453048Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1451:3221], serverId# [2:1455:3223], sessionId# [0:0:0] 2026-01-10T12:16:12.453156Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:16:12.453370Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2026-01-10T12:16:12.453435Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2026-01-10T12:16:12.453888Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-10T12:16:12.464700Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:16:12.464806Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:12.593336Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1476:3238], serverId# [2:1478:3240], sessionId# [0:0:0] 2026-01-10T12:16:12.593903Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715663 at step 5000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 5000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2026-01-10T12:16:12.593956Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:16:12.594594Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-10T12:16:12.594645Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:12.594690Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [5000:281474976715663] in PlanQueue unit at 72075186224037894 2026-01-10T12:16:12.594954Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037894 loaded tx from db 5000:281474976715663 keys extracted: 0 2026-01-10T12:16:12.595113Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:12.596058Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-10T12:16:12.596150Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 44] schema version# 1 2026-01-10T12:16:12.596635Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:12.597031Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.598544Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 4500 2026-01-10T12:16:12.598589Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:16:12.599500Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 5000} 2026-01-10T12:16:12.599570Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-10T12:16:12.601811Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-10T12:16:12.601887Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-10T12:16:12.601933Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037894 2026-01-10T12:16:12.601992Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [5000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:12.602041Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2026-01-10T12:16:12.602114Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:16:12.603078Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-10T12:16:12.603193Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:16:12.603404Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-10T12:16:12.603479Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2026-01-10T12:16:12.603535Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-01-10T12:16:12.603627Z node 2 :TX_DATASHARD 
DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-10T12:16:12.603683Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:12.606472Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 5000 2026-01-10T12:16:12.607068Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2026-01-10T12:16:12.607111Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-10T12:16:12.612423Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1506:3262], serverId# [2:1507:3263], sessionId# [0:0:0] 2026-01-10T12:16:12.612625Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1506:3262], serverId# [2:1507:3263], sessionId# [0:0:0] 2026-01-10T12:16:12.613903Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1511:3267], serverId# [2:1512:3268], sessionId# [0:0:0] 2026-01-10T12:16:12.614055Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1511:3267], serverId# [2:1512:3268], sessionId# [0:0:0] 2026-01-10T12:16:12.615597Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1516:3272], serverId# [2:1517:3273], sessionId# [0:0:0] 2026-01-10T12:16:12.615773Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1516:3272], serverId# [2:1517:3273], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:05.696846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:05.820897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:05.832468Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:05.832986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:05.833111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:06.210544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:06.210705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:06.217428Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047362660270 != 1768047362660273 2026-01-10T12:16:06.230302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:06.277750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:06.392139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:06.678992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:06.693668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:06.797652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:06.835471Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:06.835777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:06.872620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:06.872766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:06.874315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:06.874407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:06.874459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:06.874758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:06.874894Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:06.875000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:06.885847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:06.906273Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:06.906538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:06.906705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:06.906742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:06.906773Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:06.906808Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.907222Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:06.907318Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:06.907407Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:06.907439Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:06.907480Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:06.907528Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:06.907584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:06.907712Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:06.907955Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:06.908058Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:06.909905Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:06.920654Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:06.920762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:07.038416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:07.043747Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:07.043840Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:07.045364Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:07.045447Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:07.045522Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:07.045845Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:07.046049Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:07.046266Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:07.046349Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:07.055488Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:07.056100Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:07.057981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:07.058042Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:07.059046Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:07.059148Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:07.060594Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:07.060649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:07.060698Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:07.060804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:07.060879Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:07.060982Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:07.065648Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:07.067852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:07.068134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:07.068213Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:12.341652Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:12.341888Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:12.342022Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:12.342415Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.342486Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:12.342848Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:12.343199Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.344983Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:12.345056Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.345713Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:12.345783Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.346737Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.346793Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:12.346840Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:12.346904Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:12.346959Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-10T12:16:12.347064Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.347887Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:12.349908Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:12.349989Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:12.350768Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:12.358774Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:12.358868Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:12.358945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:12.359834Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:12.360223Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:12.363564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:12.369121Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:12.484621Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:12.487636Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:16:12.522157Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:12.615229Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:12.615652Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:12.615831Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-10T12:16:12.626867Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.631218Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:12.632259Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:12.643520Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:12.643610Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:12.643878Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:12.643929Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:12.644257Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.644310Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.644362Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:12.644422Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.644507Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:12.645418Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:12.645776Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:12.645974Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.646017Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.646068Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:12.646297Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.646359Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.646918Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-10T12:16:12.647213Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:12.647350Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-10T12:16:12.647403Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-10T12:16:12.649462Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:12.649514Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-10T12:16:12.649695Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:12.649730Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:12.649768Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:12.649918Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:12.649981Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:12.650030Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 
... waiting for SysViewsRoster update finished 2026-01-10T12:16:06.937024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:07.036873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:07.043946Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:07.044339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:07.044412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:07.391233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:07.391336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:07.397289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047364061951 != 1768047364061954 2026-01-10T12:16:07.410201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:07.459028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:07.545695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:07.867241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:07.882085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:07.987894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:08.021132Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:08.021426Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.070726Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.070952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.073017Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:08.073139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:08.073215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:08.073680Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.073882Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.074006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:08.084883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:08.115433Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:08.115702Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:08.115858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:08.115904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:08.115948Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:08.115989Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.116536Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:08.116684Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:08.116831Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.116883Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:08.116949Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:08.117013Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.117106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:08.117288Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:08.117575Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:08.117746Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:08.120236Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:08.131029Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:08.131170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:08.251350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:08.257171Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:08.257276Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.258940Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.259028Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:08.259093Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:08.259400Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:08.259566Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:08.259775Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.259868Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:08.262165Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:08.262756Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:08.264335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:08.264386Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.265348Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:08.265440Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.266611Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.266650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:08.266687Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:08.266748Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:08.266804Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:08.266916Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.284106Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:08.286332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:08.286543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:08.286615Z node 1 :TX_DATASHARD DEBUG: datashar ... 6-01-10T12:16:12.320647Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:12.320708Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:12.553609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:12.553741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:12.578183Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047369463667 != 1768047369463671 2026-01-10T12:16:12.582180Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:12.627488Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:12.699131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:13.046773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:13.060411Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:13.165144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:13.187439Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-10T12:16:13.187684Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:13.239768Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:13.239908Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:13.241611Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:13.241705Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:13.241788Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:13.242142Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:13.242293Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:13.242387Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-10T12:16:13.253138Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:13.253235Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:13.253346Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:13.253465Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-10T12:16:13.253507Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:13.253546Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:13.253585Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:13.253958Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:13.254055Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 
2026-01-10T12:16:13.254143Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:13.254187Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:13.254251Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:13.254327Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:13.254424Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:884:2764], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:13.254866Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:13.255125Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:13.255208Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:13.256937Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:13.267690Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:13.267836Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:13.384982Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:13.386131Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:13.386190Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:13.386834Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:13.386890Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:13.386939Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:13.387236Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:13.387385Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:13.387757Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:13.387823Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 
72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:13.388305Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:13.388735Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:13.390581Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:13.390638Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:13.391376Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:13.391456Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:13.392526Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:13.392573Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:13.392626Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:13.392702Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:13.392755Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:13.392839Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:13.393625Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:13.395435Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:13.395507Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:13.396233Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:13.401951Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-10T12:16:13.402080Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2026-01-10T12:16:13.402257Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 26401, MsgBus: 16581 2026-01-10T12:13:58.159204Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705067960109522:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:58.159389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:58.698881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:58.705040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:58.705150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:58.828395Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705067960109418:2081] 1768047238097689 != 1768047238097692 2026-01-10T12:13:58.833583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:58.843275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:58.996171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:59.112025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:59.112048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:59.112058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:59.112147Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:59.135297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:59.875318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:59.882433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:59.985407Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.247425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.454690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.624295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.628960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705085139980478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.629064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.630176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705085139980488:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.630226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.953246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.013056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.053630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.091002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.127131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.137909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705067960109522:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:03.148744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:03.202000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.244768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.295879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.388446Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705089434948659:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.388579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.389075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705089434948664:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.389113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705089434948665:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.389145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.394267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:03.428114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705089434948668:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:14:03.505782Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705089434948719:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:05.295937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... ice] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:11.413210Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:11.427132Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:11.444055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:11.444668Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:11.479853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:11.700688Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:11.700715Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:11.700738Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:11.700830Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:11.879962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:12.183972Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:12.399623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:12.428675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:15:12.436450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.570714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.833020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:12.941963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:16.180672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705379500949893:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:16.180733Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:17.881722Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705405270755521:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.881807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.882496Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705405270755531:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:17.882547Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.070569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.134732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.179866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.226721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.278491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.345881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.428891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.504124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:18.668643Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705409565723714:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.668745Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.669239Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705409565723719:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.669288Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705409565723720:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.669562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:18.674137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:18.700271Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705409565723723:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:18.803170Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705409565723776:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:23.625680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:26.413178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:15:26.413202Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:07.873518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:07.977093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:07.985111Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:07.985600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:07.985680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:08.299820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:08.299940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:08.305318Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047365131153 != 1768047365131156 2026-01-10T12:16:08.315210Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:08.358687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:08.454910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:08.750857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:08.766091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:08.873548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:08.920789Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:08.921099Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.971333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.971455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.973068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:08.973216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:08.973284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:08.973729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.973891Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.973998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:08.984856Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:09.047205Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:09.047467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:09.047598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:09.047653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:09.047714Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:09.047758Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:09.048341Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:09.048502Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:09.048634Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:09.048690Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:09.048758Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:09.048812Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:09.048891Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:09.049144Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:09.049419Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:09.049555Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:09.052006Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:09.062792Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:09.062922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:09.181762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:09.186939Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:09.187007Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:09.188323Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:09.188380Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:09.188429Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:09.188716Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:09.188894Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:09.189077Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:09.189153Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:09.191178Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:09.191720Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:09.193349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:09.193399Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:09.194264Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:09.194353Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:09.195617Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:09.195661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:09.195705Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:09.195798Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:09.195887Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:09.195982Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:09.200294Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:09.202320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:09.202524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:09.202624Z node 1 :TX_DATASHARD DEBUG: datashar ... hard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:14.228725Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:14.228763Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.229151Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:14.229262Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:14.229349Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.229392Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.229429Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:14.229473Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.229565Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:884:2764], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:14.230054Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:14.230292Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:14.230382Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:14.232060Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.242863Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:14.242975Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:14.359824Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:14.360952Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 
281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:14.361009Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.361598Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.361649Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:14.361699Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:14.361948Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:14.362077Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:14.362456Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.362524Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:14.362944Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:14.363367Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.364988Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:14.365039Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.365726Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:14.365800Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.366867Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.366919Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:14.366963Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:14.367040Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:14.367095Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:14.367180Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.367895Z node 2 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.369755Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:14.369832Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:14.370716Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:14.376367Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-10T12:16:14.376540Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.387668Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.387742Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.388067Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-10T12:16:14.390245Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-10T12:16:14.390404Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.390605Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.390662Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.390897Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-10T12:16:14.392731Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-10T12:16:14.392871Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.393031Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.393071Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.393249Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-10T12:16:14.394872Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-10T12:16:14.395002Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.395160Z node 2 :TX_DATASHARD 
INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.395201Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.395361Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-10T12:16:14.397110Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-10T12:16:14.397231Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.397458Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.397501Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.397692Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-10T12:16:14.399271Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] 2026-01-10T12:16:14.399392Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.399549Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.399584Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.399772Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:07.532972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:07.626996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:07.634554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:07.634944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:07.635019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:07.961157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:07.961303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:07.966706Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047364736042 != 1768047364736045 2026-01-10T12:16:07.980209Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:08.026309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:08.116698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:08.415514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:08.430002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:08.536479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:08.580124Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:08.580401Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.625463Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.625646Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.627169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:08.627264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:08.627340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:08.627772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.627959Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.628063Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:08.638968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:08.673095Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:08.673355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:08.673515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:08.673561Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:08.673604Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:08.673642Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.674143Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:08.674293Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:08.674430Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.674473Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:08.674532Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:08.674590Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.674676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:08.674868Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:08.675157Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:08.675273Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:08.677567Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:08.688501Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:08.688636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:08.816869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:08.821654Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:08.821745Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.823224Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.823303Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:08.823365Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:08.823623Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:08.823786Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:08.823991Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:08.826007Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:08.828395Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:08.828890Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:08.830655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:08.830712Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.831646Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:08.831739Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.833344Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:08.833390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:08.833439Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:08.833508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:08.833586Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:08.833688Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.846174Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:08.848274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:08.848484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:08.848551Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:14.096122Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:14.096347Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:14.096448Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:14.096749Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.096798Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:14.097155Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:14.097472Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.098754Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:14.098810Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.099406Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:14.099458Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.100260Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.100298Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:14.100343Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:14.100392Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:14.100432Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 
2026-01-10T12:16:14.100499Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.101088Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.102471Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:14.102528Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:14.103144Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:14.110180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.110270Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.110328Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.111007Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.111276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.114444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:14.119898Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.235677Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.238641Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:16:14.272909Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:14.384069Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2873], serverId# [2:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:14.384573Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:14.384779Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:16:14.395694Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.399445Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:14.400359Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.411586Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.411665Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.411897Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:14.411942Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:14.412231Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.412276Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.412319Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:14.412384Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.412464Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2880], serverId# [2:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:14.413395Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:14.413735Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:14.413910Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.413949Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.413997Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:14.414213Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.414275Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.414833Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-10T12:16:14.415106Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:14.415238Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-10T12:16:14.415289Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-10T12:16:14.444505Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:14.444575Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-10T12:16:14.444762Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.444801Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.444841Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:14.444952Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.445007Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.445055Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:16:02.882780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:03.008131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:03.019070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:03.019715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:03.019848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:03.373249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:03.373376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:03.379366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047359826616 != 1768047359826619 2026-01-10T12:16:03.393612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:03.442302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:03.544185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:03.838690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:03.853951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:03.961633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:04.011105Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:04.011409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:04.059751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:04.059929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:04.062040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:04.062167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:04.062246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:04.062672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:04.062825Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:04.063755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:04.074634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:04.110339Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:04.110645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:04.110820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:04.110872Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:04.110912Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:04.110954Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:04.111488Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:04.111620Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:04.111741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:04.111783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:04.111856Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:04.111924Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:04.112004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:04.112164Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:04.112456Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:04.112590Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:04.114603Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:04.125428Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:04.125593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:04.244869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:04.250709Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:04.250805Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:04.251993Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:04.252049Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:04.252303Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:04.252577Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:04.252721Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:04.252907Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:04.252983Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:04.254536Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:04.254987Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:04.256413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:04.256458Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:04.257248Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:04.257346Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:04.258663Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:04.258718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:04.258766Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:04.258866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:04.258949Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:04.259019Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:04.263319Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:04.265555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:04.265808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:04.265881Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:14.514743Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:14.515054Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T12:16:14.515276Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:14.515982Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.516057Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:14.516586Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:14.517183Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.518725Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:14.518789Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.519787Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:14.519870Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.521325Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.521598Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.521642Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:14.521685Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:14.521749Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose 
latency: 0 ms 2026-01-10T12:16:14.521804Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:16:14.521970Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.524265Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:14.524502Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:16:14.524569Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:14.534494Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.534610Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.534688Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.535628Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.535817Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.540622Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:14.548045Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.665779Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:14.669407Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:16:14.704268Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:14.816554Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:14.817042Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:14.817270Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:16:14.828306Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.832990Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:14.834094Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:14.845555Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:14.845657Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:14.846140Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:14.846198Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:14.846317Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:14.846515Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.846579Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.846637Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:14.846706Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.847853Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:14.848299Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:14.848515Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.848568Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.848624Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:14.848898Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.848982Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.849670Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2026-01-10T12:16:14.849945Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:14.850168Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2026-01-10T12:16:14.850243Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2026-01-10T12:16:14.881967Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:14.882050Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2026-01-10T12:16:14.882181Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:14.882222Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:14.882269Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:14.882415Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:14.882480Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:14.882531Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:16:03.628674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:03.752706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:03.763698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:03.764282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:03.764399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:04.158438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:04.158587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:04.165125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047360603919 != 1768047360603922 2026-01-10T12:16:04.178001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:04.225746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:04.316202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:04.619467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:04.633762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:04.737697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:04.796418Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2782] 2026-01-10T12:16:04.796731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:04.848699Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:911:2784] 2026-01-10T12:16:04.848966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:04.858679Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:04.859044Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:04.860926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:04.861048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:04.861132Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:04.861557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:04.861831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:04.861936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:950:2782] in generation 1 2026-01-10T12:16:04.862420Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2787] 2026-01-10T12:16:04.862644Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:04.872605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:04.872727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:04.874020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:04.874091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:04.874129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:04.874348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:04.874512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:04.874593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-10T12:16:04.875076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:04.875196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:04.876504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:16:04.876598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:16:04.876649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:16:04.876983Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:04.877075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:04.877135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-10T12:16:04.888000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:04.910712Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:04.911023Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:04.911176Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-10T12:16:04.911222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:04.911264Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:04.911310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:04.911416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:04.911448Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:16:04.911497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:04.911544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-10T12:16:04.911563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:04.911580Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:16:04.911617Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:04.911929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:04.911957Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:16:04.912015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:04.912058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-10T12:16:04.912111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:16:04.912149Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:16:04.912174Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:04.912595Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:04.912706Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:04.912744Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:04.912782Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:04.912880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2776], serverId# [1:908:2781], sessionId# [0:0:0] 2026-01-10T12:16:04.912924Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:04.913014Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:04.913082Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037889 TxInFly 0 2026-01-10T12:16:04.913145Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:04.913227Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-10T12:16:04.913287Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-01-10T12:16:04.913371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:898:2777], serverId# [1:912:2785], sessionId# [0:0:0] 2026-01-10T12:16:04.913409Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cp ... G: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.078240Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:15.078337Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:15.079629Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:15.079904Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:15.079940Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:15.079984Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:15.080036Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:15.080100Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:16:15.080191Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.082122Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:15.082290Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:16:15.082357Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:15.091619Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:15.091742Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:15.091805Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:15.092549Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:15.092720Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:15.097212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:15.104520Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:15.242774Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:15.245854Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:16:15.280070Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:15.356542Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:15.356901Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:15.357036Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-10T12:16:15.368056Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.533212Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] { items { uint64_value: 0 } } 2026-01-10T12:16:15.555011Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1092:2905], serverId# [3:1093:2906], sessionId# [0:0:0] 2026-01-10T12:16:15.556096Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:15.567483Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:15.567590Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.567668Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-10T12:16:15.568470Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-01-10T12:16:15.568550Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.568726Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:15.568775Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037888 2026-01-10T12:16:15.569079Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:15.569130Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:15.569184Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:15.569239Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:15.569334Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1092:2905], serverId# [3:1093:2906], sessionId# [0:0:0] 2026-01-10T12:16:15.646697Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 
2026-01-10T12:16:15.646867Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2026-01-10T12:16:15.658243Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-10T12:16:15.658539Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:16:15.658798Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:16:15.658885Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:15.659193Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:1113:2879], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:1060:2879]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:1113:2879].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:16:15.659760Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:1107:2879], SessionActorId: [3:1060:2879], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:1060:2879]. 2026-01-10T12:16:15.660162Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=M2M4YjVkZDktNjMzMDlmOGQtNGM0NDBkN2EtZDdjNjc5NTk=, ActorId: [3:1060:2879], ActorState: ExecuteState, LegacyTraceId: 01kekxbx6hek8p6zd0s7pzwgwp, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:1108:2879] from: [3:1107:2879] trace_id# 2026-01-10T12:16:15.660363Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:1108:2879] TxId: 281474976710662. Ctx: { TraceId: 01kekxbx6hek8p6zd0s7pzwgwp, Database: , SessionId: ydb://session/3?node_id=3&id=M2M4YjVkZDktNjMzMDlmOGQtNGM0NDBkN2EtZDdjNjc5NTk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:16:15.660738Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2M4YjVkZDktNjMzMDlmOGQtNGM0NDBkN2EtZDdjNjc5NTk=, ActorId: [3:1060:2879], ActorState: ExecuteState, LegacyTraceId: 01kekxbx6hek8p6zd0s7pzwgwp, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:16:15.661724Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-10T12:16:15.661792Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-10T12:16:15.661980Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:04.591311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:04.717219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:04.725556Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:04.726004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:04.726105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:05.071611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:05.071717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:05.077059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047361864338 != 1768047361864341 2026-01-10T12:16:05.086208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:05.129467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:05.224113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:05.498213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:05.513660Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:05.616957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:05.653269Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:05.653529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.698872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.699023Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.700524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:05.700602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:05.700653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:05.700941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.701085Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.701175Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:05.711912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.745596Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:05.745860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.745983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:05.746022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.746058Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:05.746092Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.746682Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:05.746784Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:05.746918Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.746958Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.747014Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:05.747069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.747130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:05.747268Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:05.747499Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:05.747614Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:05.749407Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:05.760214Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:05.760355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:05.880129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:05.892203Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:05.892289Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.893649Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.893715Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:05.893763Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:05.894059Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:05.894233Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:05.894417Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.894490Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:05.896546Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:05.897151Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.898686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:05.898740Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.899523Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:05.899616Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.900853Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.900897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.900940Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:05.901011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:05.901092Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:05.901196Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.905615Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:05.907429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:05.907638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:05.907696Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:16.089546Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:16.089851Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T12:16:16.090001Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:16.090789Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.090870Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:16.091447Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:16.091993Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.093736Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:16.093797Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.094918Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:16.095005Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.096686Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.097030Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.097086Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:16.097143Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:16.097217Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose 
latency: 0 ms 2026-01-10T12:16:16.097275Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:16:16.097369Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.099798Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:16.100048Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:16:16.100159Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:16.110817Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.110951Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.111043Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.112067Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.112291Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.117633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:16.125732Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.244491Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.247415Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:16:16.281939Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:16.372504Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:16.372879Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:16.373054Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-10T12:16:16.384165Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.389313Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:16.390547Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:16.402013Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:16.402111Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.402571Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:16.402627Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:16.402728Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:16.402884Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.402939Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.402999Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:16.403095Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.404187Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:16.404566Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:16.404748Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.404794Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.404847Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:16.405109Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.405194Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.405815Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2026-01-10T12:16:16.406067Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:16.406265Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2026-01-10T12:16:16.406328Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2026-01-10T12:16:16.440356Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:16.440439Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2026-01-10T12:16:16.440586Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.440629Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.440672Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:16.440819Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.440894Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.440955Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:04.718338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:04.821656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:04.829844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:04.830210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:04.830318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:05.124426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:05.124607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:05.129716Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047362005824 != 1768047362005827 2026-01-10T12:16:05.142379Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:05.187654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:05.280004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:05.561582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:05.576815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:05.683849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:05.722136Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:05.722369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.772312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.772526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.774685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:05.774797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:05.774907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:05.775363Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.775590Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.775733Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:05.786615Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.830288Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:05.830579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.830732Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:05.830781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.830839Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:05.830881Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.831509Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:05.831655Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:05.831793Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.831847Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.831920Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:05.831980Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.832061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:05.832282Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:05.832670Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:05.832831Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:05.835318Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:05.846268Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:05.846424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:05.966236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:05.973458Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:05.973577Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.975369Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.975450Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:05.975528Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:05.975863Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:05.976173Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:05.976476Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:05.976571Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:05.978924Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:05.979474Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.981378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:05.981439Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.982489Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:05.982597Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.984202Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:05.984257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.984307Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:05.984394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:05.984485Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:05.984580Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.989934Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:05.992198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:05.992389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:05.992457Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:16.075057Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:16.075323Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T12:16:16.075436Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:16.075964Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.076027Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:16.076433Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:16.076817Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.078464Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:16.078534Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.079753Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:16.079856Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.081495Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.081826Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.081879Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:16.081932Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:16.082004Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose 
latency: 0 ms 2026-01-10T12:16:16.082071Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:16:16.082175Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.084881Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:16.085145Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:16:16.085226Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:16.096752Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.096884Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.096969Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.097987Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.098191Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.103699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:16.111670Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.230421Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:16.233502Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:16:16.268676Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:16.433934Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:16.434433Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:16.434646Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:16:16.445749Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.450585Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:16.451957Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:16.463646Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:16.463748Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:16.464199Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:16.464257Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:16.464382Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:16.464572Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.464640Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.464716Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:16.464795Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.466081Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:16.466437Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:16.466614Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.466673Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.466716Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:16.466929Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.466989Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.467568Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2026-01-10T12:16:16.467853Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:16.468046Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2026-01-10T12:16:16.468119Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2026-01-10T12:16:16.496267Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:16.496364Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2026-01-10T12:16:16.496538Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:16.496587Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:16.496646Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:16.496805Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:16.496870Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:16.496922Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin >> TColumnShardTestSchema::HotTiers >> MoveTable::WithCommitInProgress+Reboot >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous >> MoveTable::WithCommitInProgress-Reboot >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId >> MoveTable::WithCommitInProgress-Reboot [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin >> MoveTable::WithCommitInProgress+Reboot [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress-Reboot [GOOD] Test command err: 2026-01-10T12:16:18.900643Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:18.932713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:18.932973Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:18.940539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:18.940803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:18.941047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:18.941179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:18.941281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:18.941380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:18.941481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:18.941597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:18.941756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:18.941863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.941959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-10T12:16:18.942075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:18.942194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:18.972738Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:18.973087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:18.973141Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:18.973341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:18.973562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:18.973656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:18.973698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:18.973814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:18.973880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:18.973923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:18.973953Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:18.974140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:18.974202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:18.974269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:18.974320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:18.974417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:18.974469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:18.974518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:18.974547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:18.974593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:18.974634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:18.974666Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:18.974710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:18.974755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:18.974782Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:18.974996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:18.975119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:18.975184Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:18.975317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:18.975377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.975416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.975472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:18.975529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:18.975559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:18.975602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:18.975639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:18.975671Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:18.975810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:18.975868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
s=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:16:20.217554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:16:20.217756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.217936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.218130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.218371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:20.218565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.218808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.219156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:345:2357] finished for tablet 9437184 2026-01-10T12:16:20.219704Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:339:2351];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1852220,"name":"_full_task","f":1852220,"d_finished":0,"c":0,"l":1868884,"d":16664},"events":[{"name":"bootstrap","f":1852610,"d_finished":3309,"c":1,"l":1855919,"d":3309},{"a":1867994,"name":"ack","f":1866208,"d_finished":1607,"c":1,"l":1867815,"d":2497},{"a":1867980,"name":"processing","f":1856391,"d_finished":5388,"c":3,"l":1867818,"d":6292},{"name":"ProduceResults","f":1854977,"d_finished":3034,"c":6,"l":1868488,"d":3034},{"a":1868494,"name":"Finish","f":1868494,"d_finished":0,"c":0,"l":1868884,"d":390},{"name":"task_result","f":1856415,"d_finished":3710,"c":2,"l":1866081,"d":3710}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.219796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:20.220418Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:339:2351];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1852220,"name":"_full_task","f":1852220,"d_finished":0,"c":0,"l":1869504,"d":17284},"events":[{"name":"bootstrap","f":1852610,"d_finished":3309,"c":1,"l":1855919,"d":3309},{"a":1867994,"name":"ack","f":1866208,"d_finished":1607,"c":1,"l":1867815,"d":3117},{"a":1867980,"name":"processing","f":1856391,"d_finished":5388,"c":3,"l":1867818,"d":6912},{"name":"ProduceResults","f":1854977,"d_finished":3034,"c":6,"l":1868488,"d":3034},{"a":1868494,"name":"Finish","f":1868494,"d_finished":0,"c":0,"l":1869504,"d":1010},{"name":"task_result","f":1856415,"d_finished":3710,"c":2,"l":1866081,"d":3710}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.220538Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:20.124913Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:16:20.220612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:20.220828Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-10T12:16:20.221751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-10T12:16:20.222236Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2026-01-10T12:16:20.222398Z 
node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-10T12:16:20.222480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:20.222572Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2026-01-10T12:16:18.800071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:18.835276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:18.835556Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:18.843871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:18.844171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:18.844444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:18.844621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:18.844750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:18.844861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:18.844975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:18.845101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:18.845265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:18.845383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.845502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:18.845626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:18.845754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:18.881550Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:18.881994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:18.882060Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:18.882265Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:18.882460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:18.882556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:18.882610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:18.882737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:18.882810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:18.882856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:18.882890Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:18.883074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:18.883180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:18.883240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:18.883293Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:18.883395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:18.883457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:18.883533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:18.883571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:18.883626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:18.883667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:18.883703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:18.883748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:18.883792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:18.883824Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:18.884107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:18.884244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:18.884287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:18.884436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:18.884496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.884536Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:18.884602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:18.884659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:18.884689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:18.884737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:18.884774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:18.884823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:18.884965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:18.885027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... Id=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:16:20.664264Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:16:20.664491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.664667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.664860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.665104Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:20.665279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.665444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.665748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:537:2501] finished for tablet 9437184 2026-01-10T12:16:20.666183Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:530:2495];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":2348885,"name":"_full_task","f":2348885,"d_finished":0,"c":0,"l":2364333,"d":15448},"events":[{"name":"bootstrap","f":2349279,"d_finished":2324,"c":1,"l":2351603,"d":2324},{"a":2363609,"name":"ack","f":2361734,"d_finished":1684,"c":1,"l":2363418,"d":2408},{"a":2363594,"name":"processing","f":2351801,"d_finished":5512,"c":3,"l":2363423,"d":6251},{"name":"ProduceResults","f":2351144,"d_finished":2829,"c":6,"l":2364007,"d":2829},{"a":2364012,"name":"Finish","f":2364012,"d_finished":0,"c":0,"l":2364333,"d":321},{"name":"task_result","f":2351820,"d_finished":3766,"c":2,"l":2361575,"d":3766}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.666295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:20.666757Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:530:2495];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":2348885,"name":"_full_task","f":2348885,"d_finished":0,"c":0,"l":2364867,"d":15982},"events":[{"name":"bootstrap","f":2349279,"d_finished":2324,"c":1,"l":2351603,"d":2324},{"a":2363609,"name":"ack","f":2361734,"d_finished":1684,"c":1,"l":2363418,"d":2942},{"a":2363594,"name":"processing","f":2351801,"d_finished":5512,"c":3,"l":2363423,"d":6785},{"name":"ProduceResults","f":2351144,"d_finished":2829,"c":6,"l":2364007,"d":2829},{"a":2364012,"name":"Finish","f":2364012,"d_finished":0,"c":0,"l":2364867,"d":855},{"name":"task_result","f":2351820,"d_finished":3766,"c":2,"l":2361575,"d":3766}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:20.666858Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:20.596603Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:16:20.666903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:20.667080Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-10T12:16:20.668005Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-10T12:16:20.668459Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2026-01-10T12:16:20.668626Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 
12 scanId: 0 at tablet 9437184 2026-01-10T12:16:20.668704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:20.668804Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:15:50.418055Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:15:50.422267Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:15:50.422779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:50.450795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:50.451035Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:50.458285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:50.458513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:50.458734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:50.458869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:50.458966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:50.459038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:50.459137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:50.459270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:50.459364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:50.459494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.459640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:50.459763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:50.459911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:50.484807Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:15:50.490068Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:50.490503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:50.490567Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:50.490794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:50.491003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:50.491097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:50.491144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 
2026-01-10T12:15:50.491270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:50.491344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:50.491396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:50.491436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:50.491631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:50.491712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:50.491757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:50.491799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:50.491893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:50.491954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:50.492035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:50.492092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:50.492170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:50.492212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:50.492243Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:50.492294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:50.492336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:50.492443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:50.492679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:50.492755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:50.492801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:50.492940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:50.492989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.493027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:50.493089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:50.493132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:50.493162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:50.493209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:15:50.493273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
:16:21.054988Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:16:21.055033Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:16:21.055362Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:21.055642Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.055721Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:21.055945Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:16:21.056043Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-10T12:16:21.056411Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:589:2568];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-10T12:16:21.056647Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.056811Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.057027Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.057275Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:21.057490Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.057733Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.058098Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:590:2569] finished for tablet 9437184 2026-01-10T12:16:21.058752Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:589:2568];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":31183022,"name":"_full_task","f":31183022,"d_finished":0,"c":0,"l":31193748,"d":10726},"events":[{"name":"bootstrap","f":31183375,"d_finished":1638,"c":1,"l":31185013,"d":1638},{"a":31192809,"name":"ack","f":31190895,"d_finished":1739,"c":1,"l":31192634,"d":2678},{"a":31192783,"name":"processing","f":31185218,"d_finished":4395,"c":3,"l":31192638,"d":5360},{"name":"ProduceResults","f":31184432,"d_finished":3057,"c":6,"l":31193331,"d":3057},{"a":31193343,"name":"Finish","f":31193343,"d_finished":0,"c":0,"l":31193748,"d":405},{"name":"task_result","f":31185240,"d_finished":2560,"c":2,"l":31190622,"d":2560}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.058860Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:589:2568];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:21.059478Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:589:2568];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":31183022,"name":"_full_task","f":31183022,"d_finished":0,"c":0,"l":31194487,"d":11465},"events":[{"name":"bootstrap","f":31183375,"d_finished":1638,"c":1,"l":31185013,"d":1638},{"a":31192809,"name":"ack","f":31190895,"d_finished":1739,"c":1,"l":31192634,"d":3417},{"a":31192783,"name":"processing","f":31185218,"d_finished":4395,"c":3,"l":31192638,"d":6099},{"name":"ProduceResults","f":31184432,"d_finished":3057,"c":6,"l":31193331,"d":3057},{"a":31193343,"name":"Finish","f":31193343,"d_finished":0,"c":0,"l":31194487,"d":1144},{"name":"task_result","f":31185240,"d_finished":2560,"c":2,"l":31190622,"d":2560}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:21.059579Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:21.045318Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-10T12:16:21.059642Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:21.059848Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:16:06.460209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:06.558407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:06.566407Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:06.566913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:06.567035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:06.853945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:06.854047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:06.858633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047363723749 != 1768047363723752 2026-01-10T12:16:06.867737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:06.910591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:06.993265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:07.285578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:07.299768Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:07.405500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:07.450919Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2782] 2026-01-10T12:16:07.451195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:07.493176Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:911:2784] 2026-01-10T12:16:07.493432Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:07.503113Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:07.503430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:07.505177Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:07.505293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:07.505371Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:07.505774Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:07.506030Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:07.506116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:950:2782] in generation 1 2026-01-10T12:16:07.506516Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2787] 2026-01-10T12:16:07.506736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:07.516331Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:07.516474Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:07.518048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:07.518147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:07.518204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:07.518545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:07.518780Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:07.518845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-10T12:16:07.519250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:07.519342Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:07.520784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:16:07.520870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:16:07.520920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:16:07.521264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:07.521388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:07.521461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-10T12:16:07.532491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:07.562771Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:07.563029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:07.563180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-10T12:16:07.563219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:07.563273Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:07.563311Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:07.563422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:07.563454Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:16:07.563501Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:07.563541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-10T12:16:07.563561Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:07.563583Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:16:07.563603Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:07.563984Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:07.564025Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:16:07.564104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:07.564167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-10T12:16:07.564197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:16:07.564222Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:16:07.564241Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:07.564637Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:07.564720Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:07.564770Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:07.564826Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:07.564943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2776], serverId# [1:908:2781], sessionId# [0:0:0] 2026-01-10T12:16:07.565006Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:07.565055Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:07.565097Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037889 TxInFly 0 2026-01-10T12:16:07.565145Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:07.565197Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-10T12:16:07.565249Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-01-10T12:16:07.565347Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:898:2777], serverId# [1:912:2785], sessionId# [0:0:0] 2026-01-10T12:16:07.565398Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cp ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037890, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:16:20.413883Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037890, table# 7, finished edge# 0, front# 0 2026-01-10T12:16:20.415489Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037890, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:16:20.415529Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037890, table# 8, finished edge# 0, front# 0 2026-01-10T12:16:20.416422Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:16:20.416464Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2026-01-10T12:16:20.416831Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:256: 72075186224037890 snapshot complete for split OpId 281474976710663 2026-01-10T12:16:20.417015Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976710663 2026-01-10T12:16:20.417065Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976710663 2026-01-10T12:16:20.417092Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976710663 2026-01-10T12:16:20.417114Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976710663 2026-01-10T12:16:20.417361Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976710663 2026-01-10T12:16:20.417562Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976710663 2026-01-10T12:16:20.417591Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976710663 2026-01-10T12:16:20.417615Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 
BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976710663 2026-01-10T12:16:20.417637Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976710663 2026-01-10T12:16:20.417719Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037890 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976710663 2026-01-10T12:16:20.418146Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:424: 72075186224037890 Sending snapshots from src for split OpId 281474976710663 2026-01-10T12:16:20.418307Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976710663 from datashard 72075186224037890 to datashard 72075186224037892 size 221 2026-01-10T12:16:20.418402Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976710663 from datashard 72075186224037890 to datashard 72075186224037891 size 215 2026-01-10T12:16:20.418630Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [3:1382:3097], serverId# [3:1383:3098], sessionId# [0:0:0] 2026-01-10T12:16:20.418670Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037892, clientId# [3:1381:3096], serverId# [3:1384:3099], sessionId# [0:0:0] 2026-01-10T12:16:20.418795Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976710663 from tabeltId 72075186224037890 2026-01-10T12:16:20.419395Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976710663 from tabeltId 72075186224037890 2026-01-10T12:16:20.420714Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976710663 2026-01-10T12:16:20.420861Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-10T12:16:20.420955Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:20.421059Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:16:20.421109Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [3:1387:3102] 2026-01-10T12:16:20.421139Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-10T12:16:20.421181Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-10T12:16:20.421210Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-10T12:16:20.421313Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037890 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976710663 2026-01-10T12:16:20.421923Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2026-01-10T12:16:20.421971Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-10T12:16:20.422156Z 
node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-10T12:16:20.422181Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:20.422205Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-10T12:16:20.422229Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-10T12:16:20.422326Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1382:3097], serverId# [3:1383:3098], sessionId# [0:0:0] 2026-01-10T12:16:20.422383Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976710663 2026-01-10T12:16:20.422462Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-01-10T12:16:20.422515Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:20.422557Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-10T12:16:20.422599Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037892, actorId: [3:1388:3103] 2026-01-10T12:16:20.422621Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037892 2026-01-10T12:16:20.422645Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2026-01-10T12:16:20.422663Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:16:20.422753Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037890 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976710663 2026-01-10T12:16:20.423314Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:16:20.423353Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:20.423375Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-10T12:16:20.423396Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:16:20.423605Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-10T12:16:20.423649Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-10T12:16:20.423716Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1381:3096], serverId# [3:1384:3099], sessionId# [0:0:0] 2026-01-10T12:16:20.423746Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-01-10T12:16:20.423767Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:16:20.424010Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-10T12:16:20.424040Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-10T12:16:20.445350Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037890 ack split to schemeshard 281474976710663 2026-01-10T12:16:20.450274Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976710663, at datashard: 72075186224037890, state: SplitSrcWaitForPartitioningChanged 2026-01-10T12:16:20.452306Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:16:20.452386Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037890 2026-01-10T12:16:20.452799Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:16:20.452847Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037890 state 5 2026-01-10T12:16:20.452952Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:1275:3017], serverId# [3:1276:3018], sessionId# [0:0:0] 2026-01-10T12:16:20.453142Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037890 ack split partitioning changed to schemeshard 281474976710663 2026-01-10T12:16:20.453210Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:16:20.453260Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:20.659054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:20.685122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:20.685373Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:20.692149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:20.692404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:20.692627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:20.692773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:20.692881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:20.693006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:20.693132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:20.693238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:20.693347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:20.693452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.693559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:20.693675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:20.693803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:20.723653Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:20.723838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:20.723880Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:20.724052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:20.724253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:20.724407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:20.724470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:20.724595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:20.724665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:20.724714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:20.724759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:20.724975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:20.725044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:20.725097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:20.725154Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:20.725247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:20.725303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:20.725352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:20.725379Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:20.725420Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:20.725450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:20.725470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:20.725502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:20.725546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:20.725574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:20.725747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:20.725794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:20.725864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:20.725957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:20.725991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.726020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.726085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:20.726132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:20.726162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:20.726192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:20.726226Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:20.726255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:20.726361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:20.726395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 19:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:22.169791Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136630383756480;op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136836545311616;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:22.169929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136630383756480;op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136836545311616;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:22.169995Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136630383756480;op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047381728;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136836545311616;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-10T12:16:22.170342Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:22.170502Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047381728 at tablet 9437184, mediator 0 2026-01-10T12:16:22.170557Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-10T12:16:22.170824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:22.170869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:22.170945Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: 
EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:22.171043Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-10T12:16:22.171106Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-10T12:16:22.171367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:22.172147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-10T12:16:22.184269Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:22.185753Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136630383759392;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1768047381731;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:22.198373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047381731;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136630383759392;op_tx=120:TX_KIND_SCHEMA;min=1768047381731;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:22.198449Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047381731;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136630383759392;op_tx=120:TX_KIND_SCHEMA;min=1768047381731;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:22.199648Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136630383761184;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1768047381732;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:22.211897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047381732;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136630383761184;op_tx=121:TX_KIND_SCHEMA;min=1768047381732;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:22.212014Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047381732;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136630383761184;op_tx=121:TX_KIND_SCHEMA;min=1768047381732;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" 
TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:22.213400Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136630383762976;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1768047381734;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:22.225479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047381734;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136630383762976;op_tx=122:TX_KIND_SCHEMA;min=1768047381734;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:22.225547Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047381734;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136630383762976;op_tx=122:TX_KIND_SCHEMA;min=1768047381734;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system >> TColumnShardTestSchema::RebootHotTiers >> VDiskIOTest::HugeBlobIOCount [GOOD] >> MoveTable::WithUncomittedData >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:19.448852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:19.483280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:19.483582Z 
node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:19.491920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:19.492214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:19.492469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:19.492608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:19.492731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:19.492846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:19.492976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:19.493051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:19.493136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:19.493219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:19.493371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:19.493500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:19.493652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:19.525434Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:19.525884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:19.525956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:19.526167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:19.526400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:19.526498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:19.526551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:19.526670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:19.526746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:19.526797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:19.526828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:19.527013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:19.527083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:19.527131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:19.527188Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:19.527294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:19.527360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:19.527414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-10T12:16:19.527452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:19.527507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:19.527550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:19.527581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:19.527626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:19.527668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:19.527700Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:19.527930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:19.528055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:19.528116Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:19.528307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:19.528354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:19.528386Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:19.528449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:19.528509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:19.528549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-10T12:16:19.528605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:19.528645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:19.528678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:19.528821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:19.528922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:22.647586Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:16:22.647650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:22.648147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.187000s; 2026-01-10T12:16:22.648210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:16:22.741274Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047380592:max} readable: {1768047380592:max} at tablet 9437184 2026-01-10T12:16:22.741461Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:22.745332Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047380592:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:22.745477Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047380592:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:22.746286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047380592:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:22.748336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047380592:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:22.807614Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047380592:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2026-01-10T12:16:22.809248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:22.809609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:22.810029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.810231Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.810703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:22.810920Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.811121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.811376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2026-01-10T12:16:22.811909Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":3874008,"name":"_full_task","f":3874008,"d_finished":0,"c":0,"l":3877935,"d":3927},"events":[{"name":"bootstrap","f":3874476,"d_finished":2301,"c":1,"l":3876777,"d":2301},{"a":3877128,"name":"ack","f":3877128,"d_finished":0,"c":0,"l":3877935,"d":807},{"a":3877104,"name":"processing","f":3877104,"d_finished":0,"c":0,"l":3877935,"d":831},{"name":"ProduceResults","f":3876353,"d_finished":818,"c":2,"l":3877653,"d":818},{"a":3877658,"name":"Finish","f":3877658,"d_finished":0,"c":0,"l":3877935,"d":277}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.812013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:22.812536Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":3874008,"name":"_full_task","f":3874008,"d_finished":0,"c":0,"l":3878588,"d":4580},"events":[{"name":"bootstrap","f":3874476,"d_finished":2301,"c":1,"l":3876777,"d":2301},{"a":3877128,"name":"ack","f":3877128,"d_finished":0,"c":0,"l":3878588,"d":1460},{"a":3877104,"name":"processing","f":3877104,"d_finished":0,"c":0,"l":3878588,"d":1484},{"name":"ProduceResults","f":3876353,"d_finished":818,"c":2,"l":3877653,"d":818},{"a":3877658,"name":"Finish","f":3877658,"d_finished":0,"c":0,"l":3878588,"d":930}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:22.812655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:22.748298Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:22.812701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:22.812852Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> Channels20::CaIntegrationIc [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:16:07.518961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:07.650162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:07.659763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:07.660314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:07.660452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:08.050154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:08.050316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:08.056731Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047364786927 != 1768047364786930 2026-01-10T12:16:08.073958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:08.122449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:08.216490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:08.528579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:08.543848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:08.649482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:08.700130Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2782] 2026-01-10T12:16:08.700416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.752486Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:911:2784] 2026-01-10T12:16:08.752784Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.761648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.761958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.763869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:08.763985Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:08.764045Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:08.764484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.764747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.765621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:950:2782] in generation 1 2026-01-10T12:16:08.766113Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2787] 2026-01-10T12:16:08.766343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:08.775669Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.775811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.777351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:08.777463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:08.777520Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:08.777838Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.778061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.778127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-10T12:16:08.778495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:08.778593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:08.780027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:16:08.780125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:16:08.780186Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:16:08.780539Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:08.780673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:08.780754Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-10T12:16:08.791991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:08.815751Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:08.815981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:08.816134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-10T12:16:08.816181Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:08.816224Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:08.816267Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:08.816380Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:08.816418Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:16:08.816483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:08.816531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-10T12:16:08.816553Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:08.816574Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:16:08.816593Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:08.816964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:08.817008Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:16:08.817074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:08.817130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-10T12:16:08.817161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:16:08.817189Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:16:08.817213Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:08.817627Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:08.817732Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:08.817785Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:08.817841Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:08.817993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2776], serverId# [1:908:2781], sessionId# [0:0:0] 2026-01-10T12:16:08.818045Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:08.818093Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:08.818134Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037889 TxInFly 0 2026-01-10T12:16:08.818177Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:08.818217Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-10T12:16:08.818294Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-01-10T12:16:08.818365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:898:2777], serverId# [1:912:2785], sessionId# [0:0:0] 2026-01-10T12:16:08.818413Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cp ... d__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:16:22.220273Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:22.220394Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:16:22.220424Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:16:22.220485Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-10T12:16:22.220563Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1280:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976710662, shard# 72075186224037888, status# 1 2026-01-10T12:16:22.220660Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710661 2026-01-10T12:16:22.220724Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710661 2026-01-10T12:16:22.220766Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710661 2026-01-10T12:16:22.220810Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1280:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976710662, shard# 72075186224037889, status# 1 2026-01-10T12:16:22.220847Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976710661 2026-01-10T12:16:22.220886Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1280:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976710662, shard# 72075186224037890, status# 1 2026-01-10T12:16:22.220925Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:904: [DistEraser] [3:1280:3021] Register plan: txId# 281474976710662, minStep# 2502, maxStep# 32502 2026-01-10T12:16:22.233384Z node 3 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037889 2026-01-10T12:16:22.233558Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:16:22.236432Z node 3 :TX_DATASHARD ERROR: datashard_distributed_erase.cpp:167: [DistEraser] [3:1280:3021] Reply: txId# 281474976710662, status# 
SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976710662, shard# 72075186224037889 2026-01-10T12:16:22.236641Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037889 from 72075186224037890 is reset 2026-01-10T12:16:22.236866Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037889 from 72075186224037888 is reset 2026-01-10T12:16:22.236942Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:16:22.236987Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037890 2026-01-10T12:16:22.237541Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:16:22.237586Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:22.237622Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 1 2026-01-10T12:16:22.237670Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:16:22.237759Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:1275:3017], serverId# [3:1276:3018], sessionId# [0:0:0] 2026-01-10T12:16:22.259556Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [3:1292:3032] 2026-01-10T12:16:22.259867Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:22.263318Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:22.264324Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:22.266371Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:22.266464Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:22.266527Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:22.266945Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:22.267321Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:22.267391Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [3:1307:3032] in generation 2 2026-01-10T12:16:22.278819Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:22.278976Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-10T12:16:22.279177Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:22.279626Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [3:1309:3040] 2026-01-10T12:16:22.279690Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:22.279744Z node 3 :TX_DATASHARD INFO: 
datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:16:22.279804Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:22.280183Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-10T12:16:22.280497Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-10T12:16:22.281670Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:22.281783Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:22.281933Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-10T12:16:22.282017Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 2501 2026-01-10T12:16:22.282057Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:22.282146Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:22.282176Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:22.282212Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-10T12:16:22.282248Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:22.282425Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037889 2026-01-10T12:16:22.282458Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037889 from 72075186224037889 to 72075186224037888 txId 281474976710661 2026-01-10T12:16:22.282494Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037889 from 72075186224037889 to 72075186224037888 txId 281474976710661 2026-01-10T12:16:22.282656Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037889 2026-01-10T12:16:22.282679Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037889 from 72075186224037889 to 72075186224037890 txId 281474976710661 2026-01-10T12:16:22.282697Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037889 from 72075186224037889 to 72075186224037890 txId 281474976710661 2026-01-10T12:16:22.282762Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 2500 next step 2501 2026-01-10T12:16:22.282823Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976710661 2026-01-10T12:16:22.282886Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2501 txid# 281474976710661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 
Seqno# 1 Flags# 0} 2026-01-10T12:16:22.282932Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976710661 at 72075186224037888 2026-01-10T12:16:22.282979Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:16:22.283026Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037888 {TEvReadSet step# 2501 txid# 281474976710661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:16:22.283088Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976710661 2026-01-10T12:16:22.283124Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2501 txid# 281474976710661 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-10T12:16:22.283145Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976710661 at 72075186224037890 2026-01-10T12:16:22.283167Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-10T12:16:22.283190Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037890 {TEvReadSet step# 2501 txid# 281474976710661 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-10T12:16:22.283258Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710661 2026-01-10T12:16:22.283314Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976710661 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:22.471392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:22.505232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:22.505498Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:22.513341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:22.513589Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:22.513814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:22.513965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:22.514085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:22.514192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:22.514301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:22.514418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:22.514514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:22.514655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:22.514767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:22.514873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:22.514983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:22.546943Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:22.547105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:22.547165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:22.547386Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:22.547576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:22.547686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:22.547742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:22.547853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:22.547922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:22.547972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:22.548003Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:22.548213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:22.548314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:22.548370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:22.548412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:22.548502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:22.548558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:22.548606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:22.548650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:22.548709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:22.548760Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:22.548797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:22.548847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:22.548887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:22.548918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:22.549153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:22.549203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:22.549294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:22.549447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:22.549498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:22.549536Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:22.549602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:22.549667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:22.549702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:22.549751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:22.549801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 
2026-01-10T12:16:22.549840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:22.549984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:22.550033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:23.694660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137120239609696;op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137326400903744;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:23.694729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137120239609696;op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137326400903744;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:23.694774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137120239609696;op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047383494;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137326400903744;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-10T12:16:23.695080Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:23.695181Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047383494 at tablet 9437184, mediator 0 2026-01-10T12:16:23.695226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-10T12:16:23.695520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:23.695569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:23.695638Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:23.695717Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-10T12:16:23.695786Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-10T12:16:23.696021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:23.696503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-10T12:16:23.708959Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:23.710358Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137120239612608;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1768047383497;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:23.722974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047383497;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137120239612608;op_tx=120:TX_KIND_SCHEMA;min=1768047383497;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:23.723064Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047383497;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137120239612608;op_tx=120:TX_KIND_SCHEMA;min=1768047383497;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:23.724624Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137120239614400;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1768047383499;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:23.736805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047383499;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137120239614400;op_tx=121:TX_KIND_SCHEMA;min=1768047383499;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:23.736884Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047383499;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137120239614400;op_tx=121:TX_KIND_SCHEMA;min=1768047383499;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: 
"resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:23.738445Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137120239616192;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1768047383500;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:23.751152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047383500;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137120239616192;op_tx=122:TX_KIND_SCHEMA;min=1768047383500;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:23.751276Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047383500;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137120239616192;op_tx=122:TX_KIND_SCHEMA;min=1768047383500;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount [GOOD] Test command err: Total puts# 5000 Total vdisk writes# 10093 Total log vdisk writes# 5084 Total chunk vdisk writes# 5009 Log vdisk writes per put# 1.0168 Chunk vdisk writes per put# 1.0018 VDisk writes per put# 2.0186 DeviceWrites 10093 WriteLog requests 5084 WriteHugeUser requests 5000 >> MoveTable::WithUncomittedData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:03.792504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:03.921584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:03.933318Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:03.933822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:03.933937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:04.341497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:04.341647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:04.348934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047360697751 != 1768047360697754 2026-01-10T12:16:04.365743Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:04.415777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:04.506666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:04.822137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:04.836918Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:04.942304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:04.996903Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2782] 2026-01-10T12:16:04.997163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.048311Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:911:2784] 2026-01-10T12:16:05.048570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.057477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.057747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.059627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:05.059754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:05.059818Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:05.060262Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.060558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.060684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:950:2782] in generation 1 2026-01-10T12:16:05.061111Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2787] 2026-01-10T12:16:05.061330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.071361Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.071496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.073056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:05.073150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:05.073224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:05.073566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.073782Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.073847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-10T12:16:05.074233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:05.074329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:05.075829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:16:05.075912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:16:05.075976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:16:05.076378Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:05.076514Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:05.076581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-10T12:16:05.087610Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.144735Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:05.144999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.145164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-10T12:16:05.145223Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:05.145271Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:05.145310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:05.145417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.145453Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:16:05.145517Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.145565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-10T12:16:05.145586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:05.145627Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:16:05.145651Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:05.146018Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:05.146054Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:16:05.146122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:05.146185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-10T12:16:05.146216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:16:05.146246Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:16:05.146269Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:05.146735Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:05.146871Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:05.146938Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:05.146988Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:05.147128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2776], serverId# [1:908:2781], sessionId# [0:0:0] 2026-01-10T12:16:05.147191Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:05.147250Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:05.147300Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037889 TxInFly 0 2026-01-10T12:16:05.147341Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:05.147388Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-10T12:16:05.147456Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-01-10T12:16:05.147563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:898:2777], serverId# [1:912:2785], sessionId# [0:0:0] 2026-01-10T12:16:05.147619Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cp ... 037888 2026-01-10T12:16:22.847595Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710662 2026-01-10T12:16:22.847648Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1280:3021] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976710662, shard# 72075186224037888, status# 2 2026-01-10T12:16:22.847720Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:22.847749Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976710662] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1280:3021], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:16:22.847806Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 3000 txid# 281474976710662 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 6} 2026-01-10T12:16:22.847833Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:22.847886Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976710662 2026-01-10T12:16:22.847914Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1280:3021] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976710662, shard# 72075186224037889, status# 2 2026-01-10T12:16:22.847945Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1280:3021] Reply: txId# 281474976710662, status# OK, error# 2026-01-10T12:16:22.848190Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:16:22.848233Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037890 2026-01-10T12:16:22.848546Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:16:22.848590Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:22.848634Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-10T12:16:22.848698Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:16:22.848775Z node 3 :TX_DATASHARD DEBUG: 
datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:1275:3017], serverId# [3:1276:3018], sessionId# [0:0:0] 2026-01-10T12:16:22.849832Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-10T12:16:22.850205Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-10T12:16:22.850369Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:16:22.850416Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.850461Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710664] at 72075186224037890 for WaitForStreamClearance 2026-01-10T12:16:22.850677Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.850755Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:16:22.851298Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710664, MessageQuota: 1 2026-01-10T12:16:22.851552Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976710664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:22.851730Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710664, PendingAcks: 0 2026-01-10T12:16:22.851785Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976710664, MessageQuota: 0 2026-01-10T12:16:22.853524Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:16:22.853567Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710664, at: 72075186224037890 2026-01-10T12:16:22.853675Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:16:22.853698Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.853743Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710664] at 72075186224037890 for ReadTableScan 2026-01-10T12:16:22.853845Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:22.853883Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:16:22.853927Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:22.900302Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:16:22.900752Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:16:22.900991Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:22.901049Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.901106Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710665] at 72075186224037889 for WaitForStreamClearance 2026-01-10T12:16:22.901385Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.901464Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:22.902163Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976710665, MessageQuota: 1 2026-01-10T12:16:22.902440Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037889, TxId: 281474976710665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:22.902598Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037889, TxId: 281474976710665, PendingAcks: 0 2026-01-10T12:16:22.902656Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037889, TxId: 281474976710665, MessageQuota: 0 2026-01-10T12:16:22.904599Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-10T12:16:22.904664Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710665, at: 72075186224037889 2026-01-10T12:16:22.904850Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:22.904890Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.904927Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710665] at 72075186224037889 for ReadTableScan 2026-01-10T12:16:22.905062Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:22.905125Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:22.905172Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:22.935379Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:22.935710Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:22.935867Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:22.935908Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.935947Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710666] at 72075186224037888 for WaitForStreamClearance 2026-01-10T12:16:22.936164Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry 
run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.936218Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:22.936708Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710666, MessageQuota: 1 2026-01-10T12:16:22.936907Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:22.937017Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710666, PendingAcks: 0 2026-01-10T12:16:22.937062Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710666, MessageQuota: 0 2026-01-10T12:16:22.938481Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:22.938529Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710666, at: 72075186224037888 2026-01-10T12:16:22.938650Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:22.938680Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:22.938711Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710666] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:22.938809Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:22.938848Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:22.938883Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId |90.0%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithUncomittedData [GOOD] Test command err: 2026-01-10T12:16:24.015607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:24.040983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:24.041206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:24.048449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:24.048658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:24.048855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:24.048977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:24.049052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:24.049120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:24.049186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:24.049275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:24.049392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:24.049492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:24.049662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:24.049786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:24.049923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:24.075914Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:24.076257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:24.076304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:24.076453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:24.076619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:24.076703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:24.076751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:24.076851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:24.076905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:24.076941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:24.076963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:24.077100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:24.077157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:24.077202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:24.077240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:24.077333Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:24.077378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:24.077426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:24.077459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:24.077494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:24.077521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:24.077541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:24.077583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:24.077619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:24.077639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:24.077806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:24.077900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:24.077932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:24.078070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:24.078153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:24.078206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:24.078280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:24.078329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:24.078351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:24.078383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:24.078408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:24.078436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:24.078569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:24.078628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"1,2,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":20,"id":21},"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":25,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":25,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":25,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10","t":"Projection"},"w":250,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":25,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":25,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":25,"id":16},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":25,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":25,"id":10},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":25,"id":6},"22":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":25,"id":12}}}; 2026-01-10T12:16:24.801675Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1768047384992:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:24.872189Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1768047384992:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:330:2342];trace_detailed=; 2026-01-10T12:16:24.873783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);; 2026-01-10T12:16:24.874127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-10T12:16:24.874676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.874902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.875195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:24.875461Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.875695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.875989Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:330:2342] finished for tablet 9437184 2026-01-10T12:16:24.876615Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:324:2336];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1308318,"name":"_full_task","f":1308318,"d_finished":0,"c":0,"l":1312279,"d":3961},"events":[{"name":"bootstrap","f":1308698,"d_finished":2478,"c":1,"l":1311176,"d":2478},{"a":1311375,"name":"ack","f":1311375,"d_finished":0,"c":0,"l":1312279,"d":904},{"a":1311355,"name":"processing","f":1311355,"d_finished":0,"c":0,"l":1312279,"d":924},{"name":"ProduceResults","f":1310681,"d_finished":1009,"c":2,"l":1311970,"d":1009},{"a":1311977,"name":"Finish","f":1311977,"d_finished":0,"c":0,"l":1312279,"d":302}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.876713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:324:2336];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:24.877232Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:324:2336];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":1308318,"name":"_full_task","f":1308318,"d_finished":0,"c":0,"l":1313004,"d":4686},"events":[{"name":"bootstrap","f":1308698,"d_finished":2478,"c":1,"l":1311176,"d":2478},{"a":1311375,"name":"ack","f":1311375,"d_finished":0,"c":0,"l":1313004,"d":1629},{"a":1311355,"name":"processing","f":1311355,"d_finished":0,"c":0,"l":1313004,"d":1649},{"name":"ProduceResults","f":1310681,"d_finished":1009,"c":2,"l":1311970,"d":1009},{"a":1311977,"name":"Finish","f":1311977,"d_finished":0,"c":0,"l":1313004,"d":1027}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:24.877341Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:24.801632Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:24.877390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:24.877543Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TColumnShardTestSchema::ExportAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/library/yql/dq/runtime/ut/unittest >> Channels20::CaIntegrationIc [GOOD] Test command err: Trying to start YDB, gRPC: 4127, MsgBus: 6182 2026-01-10T12:14:56.060754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705314806847316:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:56.061040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:56.178337Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705317716992779:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:56.178432Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:14:56.274686Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:14:56.874018Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:56.924623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:57.156391Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:57.198174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:57.277225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:57.277367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:57.277682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:14:57.277712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:14:57.370485Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:57.370776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:14:57.372181Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:57.383714Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:14:57.383952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:57.415688Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:57.425493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:14:57.432252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:14:57.649306Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:57.960728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:58.008365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:58.008389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:58.008401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:58.008502Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:59.509611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:59.695420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:00.206702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:00.747510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:01.057016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:01.057647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705314806847316:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:01.057710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:01.183641Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705317716992779:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:01.183693Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:04.872984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705349166587915:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:04.873114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:04.876390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705349166587924:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:04.876470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:05.388943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.495489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.675367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.774627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.867561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.995072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.160060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:06.429515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ... 
: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:11.707329Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:11.707345Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:11.707463Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:12.342074Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:12.370220Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:12.370249Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:12.375342Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:12.427814Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:12.464910Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:12.565787Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:12.719435Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:12.813977Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:16.336162Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593705637621971427:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:16.336261Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:16.346899Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593705637991786961:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:16.346976Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:16.357657Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7593705639607337123:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:16.357751Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:16.370948Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7593705639844970291:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:16.371039Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:16.408640Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593705659096809998:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.408772Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.409103Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593705659096810007:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.409177Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:16.605490Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:16.672735Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:16.747429Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:16.825302Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:16.896103Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.020037Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.087418Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.174306Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.296260Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593705663391778314:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:17.296409Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:17.296499Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593705663391778319:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:17.296892Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593705663391778321:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:17.296976Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:17.301841Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:17.333111Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593705663391778322:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720670 completed, doublechecking } 2026-01-10T12:16:17.423486Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593705663391778414:4626] txid# 281474976720671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:19.467447Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976720673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) |90.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:15:52.593966Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:15:52.601775Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:15:52.602221Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:52.634694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:52.634945Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:52.642298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:52.642567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:52.642799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:52.642938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:52.643062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:52.643160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:52.643261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:52.643384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:52.643506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:52.643610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:52.643724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:52.643839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:52.643970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:52.671231Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:15:52.676255Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:52.676575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:52.676642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:52.676848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:52.677150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:52.677264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:52.677313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:52.677424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:52.677485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:52.677535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:52.677572Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:52.677743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:52.677807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:52.677847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:52.677878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:52.677973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:52.678029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:52.678070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:52.678099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:52.678153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:52.678187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:52.678228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:15:52.678296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:52.678353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:52.678440Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:52.678671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:52.678730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:52.678768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:52.678879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:52.678923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:52.678954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:52.679020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:52.679061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:52.679089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:52.679133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:15:52.679174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
; 2026-01-10T12:16:25.162748Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:16:25.162815Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:16:25.163080Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:25.163353Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.163417Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:25.163667Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:16:25.163759Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-10T12:16:25.164135Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-10T12:16:25.164416Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.164616Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.164846Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.165099Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:25.165268Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.165399Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.165780Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-10T12:16:25.166530Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":33065956,"name":"_full_task","f":33065956,"d_finished":0,"c":0,"l":33083628,"d":17672},"events":[{"name":"bootstrap","f":33066334,"d_finished":1966,"c":1,"l":33068300,"d":1966},{"a":33082820,"name":"ack","f":33078738,"d_finished":3704,"c":2,"l":33082635,"d":4512},{"a":33082800,"name":"processing","f":33068610,"d_finished":8080,"c":5,"l":33082642,"d":8908},{"name":"ProduceResults","f":33067620,"d_finished":5152,"c":9,"l":33083168,"d":5152},{"a":33083175,"name":"Finish","f":33083175,"d_finished":0,"c":0,"l":33083628,"d":453},{"name":"task_result","f":33068638,"d_finished":4215,"c":3,"l":33078450,"d":4215}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.166683Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:25.167380Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":33065956,"name":"_full_task","f":33065956,"d_finished":0,"c":0,"l":33084498,"d":18542},"events":[{"name":"bootstrap","f":33066334,"d_finished":1966,"c":1,"l":33068300,"d":1966},{"a":33082820,"name":"ack","f":33078738,"d_finished":3704,"c":2,"l":33082635,"d":5382},{"a":33082800,"name":"processing","f":33068610,"d_finished":8080,"c":5,"l":33082642,"d":9778},{"name":"ProduceResults","f":33067620,"d_finished":5152,"c":9,"l":33083168,"d":5152},{"a":33083175,"name":"Finish","f":33083175,"d_finished":0,"c":0,"l":33084498,"d":1323},{"name":"task_result","f":33068638,"d_finished":4215,"c":3,"l":33078450,"d":4215}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.167496Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:25.145347Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-10T12:16:25.167561Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:25.167788Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047945.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047945.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046745.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-10T12:15:47.979154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:48.011947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:48.012241Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:48.019580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:48.019845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:48.020105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:48.020248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:48.020394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:48.020532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:48.020648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:48.020752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:48.020902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:48.021031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:48.021129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:48.021229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:48.021336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:48.056238Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:48.056676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:48.056740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:48.056913Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:48.057074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:48.057167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:48.057210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:48.057316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:48.057391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:48.057430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:48.057465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:48.057617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:48.057670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:48.057704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:48.057731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:48.057822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:48.057887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:48.057944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:48.057975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:48.058023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:48.058061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:48.058090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:48.058129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:48.058163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:48.058208Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:48.058412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:48.058473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:48.058504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:48.058622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:48.058662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:48.058687Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:48.058727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:48.058769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:48.058797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:48.058838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... etId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:25.821325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.821364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:25.821485Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=80000; 2026-01-10T12:16:25.821574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=80000;batch_columns=timestamp; 2026-01-10T12:16:25.821832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 2026-01-10T12:16:25.821977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.822093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.822220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.822354Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:25.822482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.822597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.822930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:833:2802] finished for tablet 9437184 2026-01-10T12:16:25.823353Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:832:2801];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":40155265,"name":"_full_task","f":40155265,"d_finished":0,"c":0,"l":40171074,"d":15809},"events":[{"name":"bootstrap","f":40155546,"d_finished":1113,"c":1,"l":40156659,"d":1113},{"a":40170425,"name":"ack","f":40167876,"d_finished":2357,"c":2,"l":40170338,"d":3006},{"a":40170415,"name":"processing","f":40156882,"d_finished":5703,"c":5,"l":40170341,"d":6362},{"name":"ProduceResults","f":40156250,"d_finished":3488,"c":9,"l":40170704,"d":3488},{"a":40170709,"name":"Finish","f":40170709,"d_finished":0,"c":0,"l":40171074,"d":365},{"name":"task_result","f":40156908,"d_finished":3233,"c":3,"l":40167644,"d":3233}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:25.823441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:25.823832Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:832:2801];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":40155265,"name":"_full_task","f":40155265,"d_finished":0,"c":0,"l":40171580,"d":16315},"events":[{"name":"bootstrap","f":40155546,"d_finished":1113,"c":1,"l":40156659,"d":1113},{"a":40170425,"name":"ack","f":40167876,"d_finished":2357,"c":2,"l":40170338,"d":3512},{"a":40170415,"name":"processing","f":40156882,"d_finished":5703,"c":5,"l":40170341,"d":6868},{"name":"ProduceResults","f":40156250,"d_finished":3488,"c":9,"l":40170704,"d":3488},{"a":40170709,"name":"Finish","f":40170709,"d_finished":0,"c":0,"l":40171580,"d":871},{"name":"task_result","f":40156908,"d_finished":3233,"c":3,"l":40167644,"d":3233}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 2026-01-10T12:16:25.823949Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:25.805556Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9739224;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9739224;selected_rows=0; 2026-01-10T12:16:25.824034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:25.824187Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 >> TColumnShardTestSchema::ExportWithLostAnswer >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin >> 
TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin >> TColumnShardTestSchema::ForgetAfterFail >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:25.795042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:25.821174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:25.821401Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:25.828618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:25.828850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:25.829066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:25.829224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:25.829336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:25.829447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:25.829553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:25.829651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:25.829747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:25.829880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:25.829989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:25.830095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:25.830199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:25.858990Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:25.859170Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:25.859224Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:25.859415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:25.859580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:25.859680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:25.859722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:25.859833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:25.859900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:25.859945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:25.859979Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:25.860180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:25.860245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:25.860297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:25.860340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:25.860445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:25.860500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:25.860566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:25.860606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:25.860657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:25.860694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:25.860722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:25.860770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:25.860811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:25.860839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:25.861046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:25.861095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:25.861191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:25.861325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:25.861369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:25.861423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:25.861494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:25.861546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:25.861586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:25.861630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:25.861667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:25.861696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:25.861822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:25.861860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:27.023613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136219561060192;op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136425722343360;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:27.023709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136219561060192;op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136425722343360;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:27.023775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136219561060192;op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047386818;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136425722343360;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-10T12:16:27.024188Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:27.024364Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047386818 at tablet 9437184, mediator 0 2026-01-10T12:16:27.024426Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-10T12:16:27.024739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:27.024782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:27.024847Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:27.024913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-10T12:16:27.024963Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-10T12:16:27.025183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:27.025723Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-10T12:16:27.041059Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:27.042847Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136219561063104;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1768047386821;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:27.055101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047386821;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136219561063104;op_tx=120:TX_KIND_SCHEMA;min=1768047386821;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:27.055172Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047386821;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136219561063104;op_tx=120:TX_KIND_SCHEMA;min=1768047386821;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } 
Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:27.056312Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136219561064896;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1768047386822;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:27.068438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047386822;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136219561064896;op_tx=121:TX_KIND_SCHEMA;min=1768047386822;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:27.068515Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047386822;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136219561064896;op_tx=121:TX_KIND_SCHEMA;min=1768047386822;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:27.070085Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136219561066688;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1768047386824;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:27.082296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047386824;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136219561066688;op_tx=122:TX_KIND_SCHEMA;min=1768047386824;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:27.082358Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047386824;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136219561066688;op_tx=122:TX_KIND_SCHEMA;min=1768047386824;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin >> TColumnShardTestSchema::RebootColdTiers >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:04.856484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:04.951110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:04.959063Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:04.959410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:04.959497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:05.297354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:05.297500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:05.305283Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047361820846 != 1768047361820849 2026-01-10T12:16:05.317534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:05.361640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:05.459950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:05.761703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:05.777464Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:05.890573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:05.953129Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2782] 2026-01-10T12:16:05.953472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:05.996899Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:911:2784] 2026-01-10T12:16:05.997111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:06.004899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:06.005199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:06.006909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:06.007000Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:06.007069Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:06.007457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:06.007746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:06.007854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:950:2782] in generation 1 2026-01-10T12:16:06.008266Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2787] 2026-01-10T12:16:06.008489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:06.015949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:06.016048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:06.017247Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:16:06.017314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:16:06.017358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:16:06.017561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:06.017726Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:06.017785Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-10T12:16:06.018124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:06.018215Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:06.019117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:16:06.019170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:16:06.019198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:16:06.019418Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:06.019482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:06.019545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-10T12:16:06.030487Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:06.067256Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:06.067500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:06.067675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-10T12:16:06.067728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:06.067771Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:06.067826Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:06.067946Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:06.067986Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:16:06.068056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:06.068132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-10T12:16:06.068156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:16:06.068176Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:16:06.068196Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:16:06.068594Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:06.068643Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:16:06.068739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:06.068796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-10T12:16:06.068826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-10T12:16:06.068855Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:16:06.068877Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:16:06.069301Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:06.069415Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:06.069470Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:16:06.069532Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-10T12:16:06.069683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2776], serverId# [1:908:2781], sessionId# [0:0:0] 2026-01-10T12:16:06.069741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:16:06.069796Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:06.069837Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037889 TxInFly 0 2026-01-10T12:16:06.069879Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-10T12:16:06.069925Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-10T12:16:06.069978Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-01-10T12:16:06.070074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:898:2777], serverId# [1:912:2785], sessionId# [0:0:0] 2026-01-10T12:16:06.070122Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cp ... datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 3500} 2026-01-10T12:16:26.546200Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-10T12:16:26.546266Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037893 from 72075186224037893 to 72075186224037892 txId 281474976710666 2026-01-10T12:16:26.546332Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-10T12:16:26.546403Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976710666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1603:3238], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:26.546547Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037893, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 } 2026-01-10T12:16:26.546622Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-10T12:16:26.546902Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1603:3238] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976710666, shard# 72075186224037893, status# 2 2026-01-10T12:16:26.547256Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037892 source 72075186224037893 dest 72075186224037892 producer 72075186224037893 txId 281474976710666 2026-01-10T12:16:26.547383Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037892 got read set: {TEvReadSet step# 3500 txid# 281474976710666 TabletSource# 72075186224037893 TabletDest# 72075186224037892 SetTabletProducer# 72075186224037893 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2026-01-10T12:16:26.547496Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037892 2026-01-10T12:16:26.547672Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:16:26.547718Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:26.547766Z node 3 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976710666] at 72075186224037892 for LoadAndWaitInRS 2026-01-10T12:16:26.548393Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:26.548594Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037893 2026-01-10T12:16:26.548938Z node 3 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 3 change records: to# [3:1420:3129], at tablet# 72075186224037893 2026-01-10T12:16:26.549001Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037893 2026-01-10T12:16:26.549755Z node 3 :TX_DATASHARD DEBUG: datashard_change_receiving.cpp:468: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037893, generation# 1, at tablet# 72075186224037891 2026-01-10T12:16:26.561132Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:16:26.561237Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976710666] from 72075186224037892 at tablet 72075186224037892 send result to client [3:1603:3238], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:16:26.561339Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037892 {TEvReadSet step# 3500 txid# 281474976710666 TabletSource# 72075186224037893 TabletDest# 72075186224037892 SetTabletConsumer# 72075186224037892 Flags# 0 Seqno# 2} 2026-01-10T12:16:26.561398Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:16:26.561511Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037893 source 72075186224037893 dest 72075186224037892 consumer 72075186224037892 txId 281474976710666 2026-01-10T12:16:26.561605Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1603:3238] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976710666, shard# 72075186224037892, status# 2 2026-01-10T12:16:26.561658Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1603:3238] Reply: txId# 281474976710666, status# OK, error# 2026-01-10T12:16:26.562006Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037893 2026-01-10T12:16:26.562060Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037893 2026-01-10T12:16:26.562386Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037893 2026-01-10T12:16:26.562426Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 4, at tablet: 72075186224037893 2026-01-10T12:16:26.562538Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 5, at tablet: 72075186224037893 2026-01-10T12:16:26.562576Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037893 2026-01-10T12:16:26.562764Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-10T12:16:26.562808Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 
immediate 0 planned 0 2026-01-10T12:16:26.562851Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2026-01-10T12:16:26.562941Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [3:1597:3233], serverId# [3:1598:3234], sessionId# [0:0:0] 2026-01-10T12:16:26.564120Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-10T12:16:26.564532Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-10T12:16:26.564753Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:16:26.564809Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.564872Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710667] at 72075186224037892 for WaitForStreamClearance 2026-01-10T12:16:26.565140Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.565226Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:16:26.565868Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976710667, MessageQuota: 1 2026-01-10T12:16:26.566035Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037892, TxId: 281474976710667, MessageQuota: 1 2026-01-10T12:16:26.606448Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037892 2026-01-10T12:16:26.606534Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710667, at: 72075186224037892 2026-01-10T12:16:26.606809Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-10T12:16:26.606857Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.606903Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710667] at 72075186224037892 for ReadTableScan 2026-01-10T12:16:26.607048Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:26.607124Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:16:26.607177Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:16:26.608472Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2026-01-10T12:16:26.608775Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2026-01-10T12:16:26.608938Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-10T12:16:26.608977Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037891 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.609017Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710668] at 72075186224037891 for WaitForStreamClearance 2026-01-10T12:16:26.609204Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.609256Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-10T12:16:26.609820Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976710668, MessageQuota: 1 2026-01-10T12:16:26.609951Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037891, TxId: 281474976710668, MessageQuota: 1 2026-01-10T12:16:26.611590Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037891 2026-01-10T12:16:26.611630Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710668, at: 72075186224037891 2026-01-10T12:16:26.611715Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-10T12:16:26.611748Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:26.611783Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710668] at 72075186224037891 for ReadTableScan 2026-01-10T12:16:26.611888Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:26.611936Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-10T12:16:26.611978Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> TColumnShardTestSchema::RebootForgetAfterFail >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:26.591433Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:26.614329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:26.614510Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:26.620296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:26.620496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:26.620658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:26.620748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:26.620822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:26.620889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:26.620964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:26.621035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:26.621128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:26.621199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.621327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:26.621397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:26.621471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:26.641756Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:26.642051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:26.642118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:26.642270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.642415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:26.642512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:26.642541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:26.642608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:26.642652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:26.642684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:26.642702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:26.642836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.642878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:26.642909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:26.642936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:26.643001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:26.643044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:26.643089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:26.643124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:26.643172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:26.643202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:26.643223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:26.643252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:26.643278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:26.643294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:26.643456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:26.643553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:26.643586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:26.643679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:26.643707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.643723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.643763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:26.643802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:26.643825Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:26.643864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:26.643895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:26.643918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:26.644004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:26.644043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d=cleanup;skip_reason=no_changes; 2026-01-10T12:16:29.873273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:16:29.873324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:29.873649Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.097000s; 2026-01-10T12:16:29.873696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:16:29.963599Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047387747:max} readable: {1768047387747:max} at tablet 9437184 2026-01-10T12:16:29.963839Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:29.968774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047387747:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:29.968927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047387747:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:29.969778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047387747:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:29.971996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047387747:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:30.047938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047387747:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2026-01-10T12:16:30.049683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:30.050026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:30.050466Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.050696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.051166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:30.051379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.051617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.051856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2026-01-10T12:16:30.052461Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":3946554,"name":"_full_task","f":3946554,"d_finished":0,"c":0,"l":3950628,"d":4074},"events":[{"name":"bootstrap","f":3947008,"d_finished":2434,"c":1,"l":3949442,"d":2434},{"a":3949817,"name":"ack","f":3949817,"d_finished":0,"c":0,"l":3950628,"d":811},{"a":3949794,"name":"processing","f":3949794,"d_finished":0,"c":0,"l":3950628,"d":834},{"name":"ProduceResults","f":3948994,"d_finished":890,"c":2,"l":3950360,"d":890},{"a":3950366,"name":"Finish","f":3950366,"d_finished":0,"c":0,"l":3950628,"d":262}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.052589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:30.053141Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":3946554,"name":"_full_task","f":3946554,"d_finished":0,"c":0,"l":3951387,"d":4833},"events":[{"name":"bootstrap","f":3947008,"d_finished":2434,"c":1,"l":3949442,"d":2434},{"a":3949817,"name":"ack","f":3949817,"d_finished":0,"c":0,"l":3951387,"d":1570},{"a":3949794,"name":"processing","f":3949794,"d_finished":0,"c":0,"l":3951387,"d":1593},{"name":"ProduceResults","f":3948994,"d_finished":890,"c":2,"l":3950360,"d":890},{"a":3950366,"name":"Finish","f":3950366,"d_finished":0,"c":0,"l":3951387,"d":1021}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:30.053284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:29.971952Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:30.053344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:30.053496Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:28.969867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:28.993777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:28.994025Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:28.999743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:28.999906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:29.000118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:29.000268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:29.000391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:29.000487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:29.000603Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:29.000712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:29.000803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:29.000953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:29.001062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:29.001163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:29.001230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:29.026901Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:29.027059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:29.027106Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:29.027310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:29.027530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:29.027634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:29.027681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:29.027789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:29.027856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:29.027900Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:29.027931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:29.028143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:29.028218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:29.028273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:29.028314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:29.028401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:29.028470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:29.028512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:29.028559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:29.028611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:29.028656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:29.028688Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:29.028733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:29.028773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:29.028799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:29.029019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:29.029082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:29.029188Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:29.029317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:29.029358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:29.029393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:29.029457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:29.029499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:29.029530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:29.029582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:29.029626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:29.029654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:29.029789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:29.029849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:30.446526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137082460521664;op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137288622087680;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:30.446628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137082460521664;op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137288622087680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:30.446729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137082460521664;op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1768047390058;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137288622087680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-10T12:16:30.447104Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:30.447279Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047390058 at tablet 9437184, mediator 0 2026-01-10T12:16:30.447354Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-10T12:16:30.447743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:30.447795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-10T12:16:30.447863Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:30.447953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-10T12:16:30.448050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-10T12:16:30.448409Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:30.449202Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-10T12:16:30.461595Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:30.463383Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137082460524576;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1768047390061;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:30.475909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047390061;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137082460524576;op_tx=120:TX_KIND_SCHEMA;min=1768047390061;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:30.476006Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1768047390061;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137082460524576;op_tx=120:TX_KIND_SCHEMA;min=1768047390061;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 
4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:30.477494Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137082460526368;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1768047390062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:30.489926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047390062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137082460526368;op_tx=121:TX_KIND_SCHEMA;min=1768047390062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:30.490004Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1768047390062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137082460526368;op_tx=121:TX_KIND_SCHEMA;min=1768047390062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-10T12:16:30.491441Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137082460528160;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1768047390064;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-10T12:16:30.503682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047390064;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137082460528160;op_tx=122:TX_KIND_SCHEMA;min=1768047390064;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-10T12:16:30.503788Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1768047390064;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137082460528160;op_tx=122:TX_KIND_SCHEMA;min=1768047390064;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:16:18.959952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:16:19.067522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:16:19.074788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:16:19.075123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:16:19.075217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:16:19.385910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:19.386032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:19.392108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047376332247 != 1768047376332250 2026-01-10T12:16:19.407057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:19.452944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:19.537170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:16:19.825013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:19.839459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:19.945475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:19.977825Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:16:19.978040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:16:20.013417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:16:20.013517Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:16:20.014784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:16:20.014848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:16:20.014894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:16:20.015191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:16:20.015362Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:16:20.015466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:16:20.026206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:16:20.061166Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:16:20.061394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:16:20.061483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:16:20.061509Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:20.061536Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:16:20.061564Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:20.061923Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:16:20.062020Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:16:20.062090Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:20.062129Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:20.062180Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:20.062218Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:20.062267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:16:20.062381Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:20.062593Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:16:20.062746Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:16:20.064625Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:20.075345Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:20.075525Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:16:20.192946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:16:20.197529Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:16:20.197643Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:20.198867Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:20.198920Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:20.198960Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:20.199233Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:16:20.199426Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:20.199626Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:20.199700Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:20.201613Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:20.202048Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:20.203635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:20.203688Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:20.204688Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:20.204776Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:20.205951Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:20.205996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:20.206036Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:20.206107Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:16:20.206167Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:16:20.206242Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:20.210292Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:20.211872Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:20.212004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:16:20.212048Z node 1 :TX_DATASHARD DEBUG: datashar ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:16:30.314219Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:16:30.314557Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976710657 keys extracted: 0 2026-01-10T12:16:30.314742Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:16:30.315547Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:30.315643Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:16:30.316279Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:16:30.316835Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:30.318633Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:16:30.318696Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:30.320008Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:16:30.320126Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:30.321898Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:30.322288Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:30.322339Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:16:30.322397Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:16:30.322470Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:399:2398], exec latency: 0 ms, propose 
latency: 0 ms 2026-01-10T12:16:30.322535Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:16:30.322629Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:30.325400Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:16:30.325655Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-10T12:16:30.325741Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:16:30.335116Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:30.335216Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:30.335277Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:30.335986Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:30.336141Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:30.339759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:30.346500Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:30.464754Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:16:30.467841Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:16:30.502449Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:30.636883Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2873], serverId# [3:1054:2874], sessionId# [0:0:0] 2026-01-10T12:16:30.637280Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:16:30.637469Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:16:30.648504Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:30.652939Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:30.654087Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-10T12:16:30.665425Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-10T12:16:30.665515Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:16:30.665940Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:30.665986Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-10T12:16:30.666086Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2880], serverId# [3:1062:2881], sessionId# [0:0:0] 2026-01-10T12:16:30.666229Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:30.666282Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:30.666326Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:16:30.666393Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:30.667246Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:16:30.667580Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:16:30.667778Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:30.667820Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:30.667867Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 
2026-01-10T12:16:30.668072Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:30.668166Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:30.668735Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2026-01-10T12:16:30.669061Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-10T12:16:30.669226Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2026-01-10T12:16:30.669271Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2026-01-10T12:16:30.692435Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-10T12:16:30.692502Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2026-01-10T12:16:30.692612Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:16:30.692649Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:16:30.692689Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2026-01-10T12:16:30.692814Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:16:30.692870Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:16:30.692912Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TColumnShardTestSchema::RebootOneColdTier >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream |90.1%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] |90.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047942.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168047942.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047942.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148047942.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046742.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046742.000000s;Name=;Codec=}; 2026-01-10T12:15:44.982828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:45.022015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:45.022368Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:45.031735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:45.031993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:45.032260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:45.032389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:45.032531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:45.032649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:45.032747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:45.032848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:45.032991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:45.033104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.033202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:45.033296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:45.033400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:45.068386Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:45.068787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:45.068843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:45.069008Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:45.069172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:45.069261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:45.069327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:45.069427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:45.069503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:45.069548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:45.069582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:45.069730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:45.069784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:45.069826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:45.069853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:45.069946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:45.070009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:45.070081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:45.070115Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:45.070166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:45.070201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:45.070228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:45.070269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:45.070318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:45.070347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:45.070522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:45.070583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:45.070626Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:45.070740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:45.070792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.070817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:45.070862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:15:45.070898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:15:45.070924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:15:45.070967Z node 1 :TX_COLUM ... d;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-10T12:16:31.982616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=83; 2026-01-10T12:16:31.982651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3513; 2026-01-10T12:16:31.982698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3626; 2026-01-10T12:16:31.982755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-10T12:16:31.982821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=26; 2026-01-10T12:16:31.982853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4237; 2026-01-10T12:16:31.982972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2026-01-10T12:16:31.983071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=56; 2026-01-10T12:16:31.983184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=70; 2026-01-10T12:16:31.983314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=89; 2026-01-10T12:16:31.985828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2433; 2026-01-10T12:16:31.988252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2340; 2026-01-10T12:16:31.988336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:16:31.988383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-10T12:16:31.988417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:16:31.988476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2026-01-10T12:16:31.988513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:16:31.988583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2026-01-10T12:16:31.988620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:16:31.988686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-10T12:16:31.988761Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 
2026-01-10T12:16:31.989006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=210; 2026-01-10T12:16:31.989051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20458; 2026-01-10T12:16:31.989186Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:16:31.989284Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:16:31.989337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:16:31.989396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:16:31.999620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:16:31.999768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:31.999857Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:16:31.999920Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047082489;tx_id=18446744073709551615;;current_snapshot_ts=1768047370376; 2026-01-10T12:16:31.999975Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:16:32.000027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:32.000066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:32.000173Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:32.000368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.192000s; 2026-01-10T12:16:32.001792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 
2026-01-10T12:16:32.002622Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:16:32.002678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:32.002767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:16:32.002832Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047082489;tx_id=18446744073709551615;;current_snapshot_ts=1768047370376; 2026-01-10T12:16:32.002875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:16:32.002921Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:32.002958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:32.003041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:32.003501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.115000s; 2026-01-10T12:16:32.003550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 >> MoveTable::WithData+Reboot >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> MoveTable::EmptyTable >> 
TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin >> MoveTable::RenameToItself_Negative >> MoveTable::WithData+Reboot [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2026-01-10T12:10:22.036740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:22.036824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:22.618358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:24.267835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:10:24.569079Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.569735Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.570390Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4871247581869017030 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.700642Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.701227Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.701438Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12820981344882681214 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.750891Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.751477Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.751698Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4530657047494413551 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.806053Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:10:24.806577Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:10:24.806846Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/001a36/r3tmp/tmpKtl5FB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4302992164571865359 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:10:24.815938Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 Hu ... 
6]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:17.777470Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:25.333769Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:25.334180Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:32.538746Z node 154 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.105218s 2026-01-10T12:15:32.538860Z node 154 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.105371s 2026-01-10T12:15:32.538959Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:32.539279Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:32.729453Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:32.730120Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:39.847718Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:39.848280Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:46.866292Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:46.866679Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:53.340641Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:53.341078Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:59.723812Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:15:59.724055Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool 
/dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:06.235170Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:06.235596Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:12.649980Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:12.650414Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:18.794379Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:18.794687Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:25.179455Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:25.179761Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:31.572663Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-10T12:16:31.573049Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> MoveTable::EmptyTable [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData+Reboot [GOOD] Test command err: 2026-01-10T12:16:33.580560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:33.614110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:33.614338Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:33.621634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:33.621869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:33.622085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:33.622227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:33.622336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:33.622436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:33.622544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:33.622655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:33.622831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:33.622954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:33.623051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:33.623175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:33.623315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:33.654486Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:33.654864Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:33.654920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:33.655103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:33.655267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:33.655351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:33.655393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:33.655491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:33.655576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:33.655630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:33.655676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:33.655863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:33.655927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:33.655977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:33.656029Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:33.656144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:33.656201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:33.656251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:33.656280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:33.656336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:33.656379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:33.656409Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:33.656448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:33.656496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:33.656523Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:33.656737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:33.656902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:33.656940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:33.657065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:33.657139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:33.657179Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:33.657239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:33.657294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:33.657323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:33.657364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:33.657399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:33.657430Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:33.657566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:33.657626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... en=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:16:34.600463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:16:34.600685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.600873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.601044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.601278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:34.601443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.601634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.601912Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:387:2387] finished for tablet 9437184 2026-01-10T12:16:34.602458Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:380:2381];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1487311,"name":"_full_task","f":1487311,"d_finished":0,"c":0,"l":1499624,"d":12313},"events":[{"name":"bootstrap","f":1487526,"d_finished":1897,"c":1,"l":1489423,"d":1897},{"a":1498908,"name":"ack","f":1497061,"d_finished":1695,"c":1,"l":1498756,"d":2411},{"a":1498891,"name":"processing","f":1489647,"d_finished":4559,"c":3,"l":1498760,"d":5292},{"name":"ProduceResults","f":1488969,"d_finished":2780,"c":6,"l":1499313,"d":2780},{"a":1499319,"name":"Finish","f":1499319,"d_finished":0,"c":0,"l":1499624,"d":305},{"name":"task_result","f":1489661,"d_finished":2814,"c":2,"l":1496934,"d":2814}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.602578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:34.602923Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:380:2381];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1487311,"name":"_full_task","f":1487311,"d_finished":0,"c":0,"l":1500283,"d":12972},"events":[{"name":"bootstrap","f":1487526,"d_finished":1897,"c":1,"l":1489423,"d":1897},{"a":1498908,"name":"ack","f":1497061,"d_finished":1695,"c":1,"l":1498756,"d":3070},{"a":1498891,"name":"processing","f":1489647,"d_finished":4559,"c":3,"l":1498760,"d":5951},{"name":"ProduceResults","f":1488969,"d_finished":2780,"c":6,"l":1499313,"d":2780},{"a":1499319,"name":"Finish","f":1499319,"d_finished":0,"c":0,"l":1500283,"d":964},{"name":"task_result","f":1489661,"d_finished":2814,"c":2,"l":1496934,"d":2814}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:34.603038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:34.537559Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:16:34.603074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:34.603193Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-10T12:16:34.603926Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-10T12:16:34.604244Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1768047394559:12} readable: {1768047394559:max} at tablet 9437184 2026-01-10T12:16:34.604362Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan 
prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-10T12:16:34.604412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1768047394559:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:34.604508Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1768047394559:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::EmptyTable [GOOD] Test command err: 2026-01-10T12:16:34.030681Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:34.058247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:34.058459Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:34.065723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:34.065921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:34.066117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:34.066260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:34.066355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:34.066433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:34.066506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:34.066580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2026-01-10T12:16:34.066689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:34.066770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.066842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:34.066937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:34.067039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:34.094688Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:34.095076Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:34.095160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:34.095351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.095559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:34.095651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:34.095691Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:34.095799Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:34.095868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:34.095914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:34.095942Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:34.096148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.096226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:34.096278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:34.096350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:34.096451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:34.096507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:34.096538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:34.096559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:34.096593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:34.096619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:34.096636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:34.096674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:34.096699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:34.096715Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:34.096920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:34.097028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:34.097071Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:34.097169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:34.097223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.097266Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.097332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:34.097403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:34.097437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:34.097480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:34.097508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:34.097528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:34.097629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:34.097668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2026-01-10T12:16:34.365998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=14; 2026-01-10T12:16:34.366058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=26; 2026-01-10T12:16:34.366099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2296; 2026-01-10T12:16:34.366209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:16:34.366251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:16:34.366320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:16:34.366567Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:34.366603Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:16:34.366661Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.131000s; 2026-01-10T12:16:34.366887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:16:34.366953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:16:34.367051Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:34.367085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:16:34.367209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.097000s; 2026-01-10T12:16:34.367247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:16:35.013642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136481682938176;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.013707Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136481682938176;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:35.025751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136481682938176;op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136687844090624;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:35.025831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136481682938176;op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136687844090624;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2026-01-10T12:16:35.025874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136481682938176;op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395000;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136687844090624;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-10T12:16:35.026157Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:35.026251Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047395000 at tablet 9437184, mediator 0 2026-01-10T12:16:35.026285Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-10T12:16:35.026529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.026614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.026656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 
2026-01-10T12:16:35.026738Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:35.033403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1768047395000;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-10T12:16:35.033485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:16:35.033582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-10T12:16:35.033629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-10T12:16:35.033830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:35.044834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-10T12:16:35.068203Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-10T12:16:35.069050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136481682988128;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=2; 2026-01-10T12:16:35.069107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136481682988128;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-10T12:16:35.081146Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136481682988128;op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136687844158784;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:35.081239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136481682988128;op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136687844158784;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:16:35.081291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136481682988128;op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1768047395004;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136687844158784;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=11; 2026-01-10T12:16:35.081555Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:35.081704Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047395004 at tablet 9437184, mediator 0 2026-01-10T12:16:35.081770Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-10T12:16:35.082021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=11;event=move_table_progress;old_path_id=1;new_path_id=2;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-10T12:16:35.093843Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] Test command err: 2026-01-10T12:16:34.911122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:34.932326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:34.932520Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:34.939984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:34.940207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:34.940471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:34.940587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:34.940656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:34.940718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:34.940824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:34.940946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:34.941066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:34.941175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.941246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:34.941362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:34.941451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:34.970459Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:34.970737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:34.970786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:34.970948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.971082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:34.971161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:34.971195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:34.971283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:34.971335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:34.971369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:34.971399Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:34.971547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.971594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:34.971641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:34.971680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:34.971750Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:34.971790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:34.971821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:34.971841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:34.971877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:34.971905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:34.971932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:16:34.971965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:34.971995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:34.972013Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:34.972231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:34.972315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:34.972355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:34.972455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:34.972495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.972522Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.972566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:34.972615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:34.972637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:34.972668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:34.972692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:34.972715Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:34.972834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-10T12:16:34.972875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mon_data.cpp:29;EXECUTE:storages_managerLoadingTime=292; 2026-01-10T12:16:35.240318Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-10T12:16:35.240352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2026-01-10T12:16:35.240378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:16:35.240442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-01-10T12:16:35.240481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:16:35.240574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2026-01-10T12:16:35.240618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:16:35.240677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-10T12:16:35.240722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=17; 2026-01-10T12:16:35.240770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=21; 2026-01-10T12:16:35.240800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2678; 2026-01-10T12:16:35.240927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:16:35.240973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:16:35.241045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:16:35.241329Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:35.241367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:16:35.241431Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.113000s; 2026-01-10T12:16:35.241686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:16:35.241758Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:16:35.241790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:35.241837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-10T12:16:35.241949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.081000s; 2026-01-10T12:16:35.241998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:16:35.543760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136665252944192;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.543821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136665252944192;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-10T12:16:35.556214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136665252944192;op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136871414096640;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:16:35.556324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136665252944192;op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136871414096640;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2026-01-10T12:16:35.556376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136665252944192;op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768047395877;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=136871414096640;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-10T12:16:35.556681Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:35.556780Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047395877 at tablet 9437184, mediator 0 2026-01-10T12:16:35.556831Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-10T12:16:35.557146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.557257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.557319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:16:35.557435Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:16:35.564929Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1768047395877;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-10T12:16:35.565033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:16:35.565159Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-10T12:16:35.565219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-10T12:16:35.565430Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:16:35.580547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-10T12:16:35.605228Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-10T12:16:35.606253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136665252994144;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=1; 2026-01-10T12:16:35.606331Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136665252994144;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Rename to existing table; 2026-01-10T12:16:35.618831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136665252994144;op_tx=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:16:35.618933Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136665252994144;op_tx=11:TX_KIND_SCHEMA;min=1768047395880;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=Rename to existing table;tablet_id=9437184;tx_id=11; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047955.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047955.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047955.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047955.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047955.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148047955.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=168047955.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047955.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046755.000000s;Name=;Codec=}; 2026-01-10T12:15:56.083831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:56.118856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:56.119154Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:56.127525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:56.127857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:56.128119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:56.128314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:56.128460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:56.128616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:56.128742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:56.128864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:56.129028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:56.129158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.129270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:56.129383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:56.129512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:56.163173Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:56.163579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:56.163670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:56.163869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.164066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:56.164174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:56.164248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:56.164377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:56.164475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:56.164528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:56.164568Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:56.164769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.164843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:56.164893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:15:56.164930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:56.165035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:56.165123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:56.165195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:56.165235Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:56.165298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:56.165351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:56.165393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:56.165445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:56.165495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:56.165529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:56.165756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:56.165820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:56.165863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:56.166016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:56.166069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.166108Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.166169Z node 1 :TX ... LoadingTime=12; 2026-01-10T12:16:35.732538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=103; 2026-01-10T12:16:35.732581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7577; 2026-01-10T12:16:35.732623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7696; 2026-01-10T12:16:35.732725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=33; 2026-01-10T12:16:35.732826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=46; 2026-01-10T12:16:35.732878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8420; 2026-01-10T12:16:35.733062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=106; 2026-01-10T12:16:35.733182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2026-01-10T12:16:35.733322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=93; 2026-01-10T12:16:35.733429Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=70; 2026-01-10T12:16:35.738599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5102; 2026-01-10T12:16:35.743404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4683; 2026-01-10T12:16:35.743524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-10T12:16:35.743622Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-10T12:16:35.743684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:16:35.743782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-10T12:16:35.743836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:16:35.743927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-10T12:16:35.743966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:16:35.744025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2026-01-10T12:16:35.744138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=80; 2026-01-10T12:16:35.744458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=273; 2026-01-10T12:16:35.744505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29460; 2026-01-10T12:16:35.744645Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=24365192;raw_bytes=35131129;count=5;records=400000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:16:35.744756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:16:35.744811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:16:35.744880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:16:35.765262Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:16:35.765426Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:35.765547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:16:35.765624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047093796;tx_id=18446744073709551615;;current_snapshot_ts=1768047357388; 2026-01-10T12:16:35.765670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:16:35.765714Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:35.765754Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:35.765843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:35.766115Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.138000s; 2026-01-10T12:16:35.768202Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:16:35.768576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:16:35.768642Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:16:35.768737Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:16:35.768808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047093796;tx_id=18446744073709551615;;current_snapshot_ts=1768047357388; 2026-01-10T12:16:35.768851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:16:35.768899Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:35.768947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:35.769054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:35.769505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.187000s; 2026-01-10T12:16:35.769559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:15:58.830037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:15:58.830172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.830225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:15:58.830264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:15:58.830314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:15:58.830354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:15:58.830441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:15:58.830541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:15:58.831587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:15:58.831987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:15:58.928766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:58.928849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:58.946563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:15:58.946698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:15:58.946871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:15:58.957103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:15:58.957341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:15:58.958148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:15:58.958357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:15:58.961216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.961419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:15:58.962690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-10T12:15:58.962779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:15:58.962876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:15:58.962931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:15:58.963003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:15:58.963224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:15:59.119130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.119947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.120916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
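
The entries throughout this output follow one structured shape: an ISO timestamp, "node N", a component tag such as :TX_COLUMNSHARD or :FLAT_TX_SCHEMESHARD, a severity, a source location, and then a payload that (for the columnshard lines) is a run of semicolon-separated key=value fields, e.g. the EXECUTE:granuleLoadingTime values above. Below is a rough, hypothetical helper for pulling those fields out of a captured line; it is not part of YDB or ya, just a minimal sketch assuming the entry format seen in this log.

import re

# Hypothetical helper (not YDB tooling): splits one entry of the form
#   <ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <payload>
# and breaks the payload into the semicolon-separated key=value fields
# used by the TX_COLUMNSHARD entries in this log.
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<level>\w+): (?P<payload>.*)"
)

def parse_entry(line: str) -> dict:
    m = ENTRY_RE.match(line)
    if not m:
        return {}
    entry = m.groupdict()
    fields = {}
    for part in entry["payload"].split(";"):
        if "=" in part:
            key, _, value = part.partition("=")
            fields[key.strip()] = value
    entry["fields"] = fields
    return entry

sample = ("2026-01-10T12:16:35.732581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: "
          "TEST_STEP=3;tablet_id=9437184;event=initialize_shard;"
          "fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7577;")
print(parse_entry(sample)["fields"].get("EXECUTE:granuleLoadingTime"))  # prints 7577

The regex and field names are assumptions read off the entries above; a real analyzer would also want to strip the leading source-location token from the first payload field.
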
2026-01-10T12:15:59.120989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:15:59.121139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... [1:3868:5830], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:5204 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2C3EF812-8ACE-4E8C-9240-5EE978FC3540 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2026-01-10T12:16:36.582303Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3869:5831], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2026-01-10T12:16:36.582533Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3868:5830] 2026-01-10T12:16:36.582616Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3869:5831], sender# [1:3868:5830], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:5204 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C9F2616E-9376-4BC3-A84D-345DA2D3E05A amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2026-01-10T12:16:36.585768Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3869:5831], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-10T12:16:36.585819Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3869:5831], success# 1, error# , multipart# 1, uploadId# 1 2026-01-10T12:16:36.590945Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3869:5831], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:5204 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
E602E76A-DA40-4E0F-B251-07CC7CE40856 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2026-01-10T12:16:36.599294Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3869:5831], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2026-01-10T12:16:36.599732Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3868:5830], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:36.614791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:36.614865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:36.615052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:36.615169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:36.615262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:36.615304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:36.615344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:36.615387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:16:36.615566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:36.620136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
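
The TBackupTests output above shows the datashard exporter streaming data_00.csv to the mock S3 endpoint as a standard multipart upload: a series of PUT /data_00.csv?partNumber=N&uploadId=1 requests (101 parts here, each acknowledged with an ETag), followed by a POST /data_00.csv?uploadId=1 carrying the collected part list, answered with CompleteMultipartUploadResult. A minimal boto3 sketch of that same flow is below; the port is taken from the log's S3_MOCK, while the bucket name, key, and chunk contents are placeholders, and this is a generic illustration, not the exporter's actual C++ code.

import boto3

# Sketch of the multipart flow visible above: part PUTs with
# partNumber/uploadId, then a completing request with the ETag list.
# Endpoint port comes from the log; bucket/key/chunks are assumptions.
s3 = boto3.client("s3", endpoint_url="http://localhost:5204")

upload = s3.create_multipart_upload(Bucket="backup", Key="data_00.csv")
upload_id = upload["UploadId"]

parts = []
for number, chunk in enumerate([b"row1\n", b"row2\n"], start=1):
    resp = s3.upload_part(Bucket="backup", Key="data_00.csv",
                          PartNumber=number, UploadId=upload_id, Body=chunk)
    # Each part's ETag must be echoed back when completing the upload,
    # exactly as the exporter does with the ETags logged above.
    parts.append({"PartNumber": number, "ETag": resp["ETag"]})

s3.complete_multipart_upload(
    Bucket="backup", Key="data_00.csv", UploadId=upload_id,
    MultipartUpload={"Parts": parts},
)

Note that real S3 enforces a 5 MiB minimum for every part except the last; the tiny parts here only work against a permissive mock like the one in this test.
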
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:36.620600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:36.620661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:36.620830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:36.620872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:36.620909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:36.620940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:36.620969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:16:36.621023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:16:36.621063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:36.621093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:36.621119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:36.621229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:36.626024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:36.626106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3853:5816] TestWaitNotification: OK eventTxId 102 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> KqpScripting::StreamOperationTimeout [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 17735, MsgBus: 14819 2026-01-10T12:15:00.648724Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705335444439911:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:00.649721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:00.700138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:01.468640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:01.468759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:01.487013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:01.536269Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705335444439888:2081] 1768047300599982 != 1768047300599985 2026-01-10T12:15:01.580626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:01.585579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:01.660877Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:01.764706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:01.992802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:01.992826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:01.992833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:01.993127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:03.125824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:03.251664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:03.469698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:03.780973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:03.930597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:05.609545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705335444439911:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:05.609616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:15:06.669743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705361214245544:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.669881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.674903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705361214245553:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:06.675019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.102083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.152795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.213273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.278397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.330973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.404951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.515912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.612646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:07.770466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705365509213727:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.770561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.770837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705365509213732:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.770878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705365509213733:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.770977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:15:07.778598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:15:07.797361Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705365509213736:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:15:07.859901Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705365509213787:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:15:11.666473Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding ... 389906, txId: 281474976711268] shutting down 2026-01-10T12:16:30.214074Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047390249, txId: 281474976711271] shutting down Trying to start YDB, gRPC: 2125, MsgBus: 26509 2026-01-10T12:16:31.403580Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705725262002978:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:31.403649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:31.416954Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:31.515506Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:31.521521Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:31.521617Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:31.523615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:31.600843Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:31.600867Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:31.600876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:31.600985Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:31.610050Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:32.119955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:32.135885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:32.195009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:32.393798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:32.415980Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:32.470979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.447901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705742441873995:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.448028Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.448394Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705742441874004:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.448476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.518989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.551323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.578445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.607858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.637971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.669176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.701593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.747337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:35.820071Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705742441874878:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.820187Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.820215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705742441874883:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.820482Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705742441874885:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.820553Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:35.823732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:35.833237Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705742441874886:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:16:35.896929Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705742441874938:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:36.403995Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705725262002978:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:36.404096Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:37.834108Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=2&id=ZjMwNDNmMzctYTZiMTZlMTktZjk0MGNhNTQtOGZmNjYxMDA= } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:00.881248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:00.881356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:00.881401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:00.881442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:00.881481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:00.881515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:00.881567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:00.881646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:00.882558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:00.882892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:00.980438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:00.980513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:01.002407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:01.002541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:01.002730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:01.017933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:01.018189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:01.019005Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:01.019264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:01.022007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:01.022197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:01.023481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:01.023556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:01.023635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:01.023681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:01.023741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:01.024016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:01.161376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.162537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.162711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.162804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:16:01.162874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.162983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:01.163754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 38.187543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:16:38.187688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:16:38.187854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:38.196461Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:3860:5822], attempt# 0 2026-01-10T12:16:38.222339Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3860:5822], sender# [1:3859:5821] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:19281 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DA6C04E1-BB5A-4D13-AF63-17F23C8EFACA amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-10T12:16:38.228539Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5822], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19281 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 144A2AD1-32F3-4397-8449-34161C8D7631 amz-sdk-request: attempt=1 
content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-10T12:16:38.244830Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5822], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-10T12:16:38.245768Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3859:5821] 2026-01-10T12:16:38.246300Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3860:5822], sender# [1:3859:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:16:38.249681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:19281 Accept: */* Connection: Upgrade, HTTP2-Settings2026-01-10T12:16:38.249767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 841FDD39-465C-4BA5-93E3-8977AE5AD9A8 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2026-01-10T12:16:38.250077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:38.250131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:16:38.250797Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5822], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2026-01-10T12:16:38.250855Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3860:5822], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-10T12:16:38.251371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.251446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:16:38.252188Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3859:5821], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:38.262038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 
PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:38.262182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:16:38.262230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:16:38.262278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:16:38.262328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:16:38.262439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:16:38.271435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:16:38.298950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.299027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:38.299221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.299338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 716 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.299447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:38.299502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.299548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:38.299603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change 
state for txid 102:0 129 -> 240 2026-01-10T12:16:38.299819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:38.304934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.305732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.305807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:38.305926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:38.305966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.306010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:38.306046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.306083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:16:38.306168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:742:2720] message: TxId: 102 2026-01-10T12:16:38.306230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.306271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:38.306307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:38.306435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:38.312069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:38.312161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3845:5808] TestWaitNotification: OK eventTxId 102 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:00.452531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:00.452647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:00.452691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:00.452731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:00.452777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:00.452818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:00.452880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:00.452973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:00.454011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:00.454370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:00.559466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:00.559540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:00.581651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:00.581787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:00.581962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:00.592839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:00.593110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:00.593994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:00.594268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:00.597205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2026-01-10T12:16:00.597403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:00.598827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:00.598921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:00.599013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:00.599069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:00.599150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:00.599504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:00.781878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:16:00.783631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.783921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:00.784569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27225 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0B27B8AD-34A0-4C4C-A681-AEA8A4BC67E8 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2026-01-10T12:16:38.488535Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3869:5831], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2026-01-10T12:16:38.488720Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3868:5830] 2026-01-10T12:16:38.488819Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3869:5831], sender# [1:3868:5830], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27225 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BC4C12F8-3019-427F-978A-E43D21E7CE6A amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2026-01-10T12:16:38.491560Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3869:5831], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-10T12:16:38.491638Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3869:5831], success# 1, error# , multipart# 1, uploadId# 1 2026-01-10T12:16:38.497207Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3869:5831], upload# { Id: 1 
Status: Complete Error: (empty maybe) Parts: [f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27225 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1F5308C2-DC61-4B3C-8830-E1DDC91F791C amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2026-01-10T12:16:38.505620Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3869:5831], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2026-01-10T12:16:38.506211Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3868:5830], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-10T12:16:38.523484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.523579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:16:38.523789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.523917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-10T12:16:38.524033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:38.524119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.524173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:16:38.524241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:16:38.524408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:38.528646Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.529011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:16:38.529072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:16:38.529203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:38.529248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.529290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:16:38.529350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.529397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:16:38.529473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:16:38.529526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:16:38.529582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:16:38.529618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:16:38.529757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:16:38.534286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:16:38.534355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3853:5816] TestWaitNotification: OK eventTxId 102 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] |90.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:37.503147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:37.528095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:37.528292Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:37.533860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:37.534120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:37.534354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:37.534487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:37.534595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:37.534694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:37.534820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:37.534932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:37.535057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:37.535165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:37.535301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:37.535400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:37.535513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:37.563646Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:37.564005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:37.564091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:37.564280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:37.564451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:37.564534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:37.564579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:37.564700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:37.564766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:37.564816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:37.564847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:37.565019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:37.565085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:37.565126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:37.565170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:37.565275Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:37.565333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:37.565386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:37.565419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:37.565485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:37.565532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:37.565562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:37.565612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:37.565654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:37.565682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:37.565897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:37.566020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:37.566062Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:37.566213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:37.566260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:37.566300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:37.566354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:37.566411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:37.566442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:37.566483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:37.566536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:37.566573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:37.566715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:37.566772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 7184;request_tx=104:TX_KIND_SCHEMA;min=1768047398599;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136916461866016;op_tx=104:TX_KIND_SCHEMA;min=1768047398599;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1768047398599;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137122602120384;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-10T12:16:40.232967Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:40.233090Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047398599 at tablet 9437184, mediator 0 2026-01-10T12:16:40.233143Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-10T12:16:40.233382Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-10T12:16:40.245569Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-10T12:16:40.246051Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047398599:max} readable: {1768047398599:max} at tablet 9437184 2026-01-10T12:16:40.246206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:40.249740Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047398599:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:40.249830Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047398599:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:40.250615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047398599:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:40.252315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047398599:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:40.305230Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047398599:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2026-01-10T12:16:40.306493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:40.306727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:40.307038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.307177Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.307394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:40.307531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.307727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.307914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2026-01-10T12:16:40.308286Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3225591,"name":"_full_task","f":3225591,"d_finished":0,"c":0,"l":3228407,"d":2816},"events":[{"name":"bootstrap","f":3225944,"d_finished":1695,"c":1,"l":3227639,"d":1695},{"a":3227795,"name":"ack","f":3227795,"d_finished":0,"c":0,"l":3228407,"d":612},{"a":3227780,"name":"processing","f":3227780,"d_finished":0,"c":0,"l":3228407,"d":627},{"name":"ProduceResults","f":3227328,"d_finished":639,"c":2,"l":3228187,"d":639},{"a":3228195,"name":"Finish","f":3228195,"d_finished":0,"c":0,"l":3228407,"d":212}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.308361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:40.308747Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3225591,"name":"_full_task","f":3225591,"d_finished":0,"c":0,"l":3228827,"d":3236},"events":[{"name":"bootstrap","f":3225944,"d_finished":1695,"c":1,"l":3227639,"d":1695},{"a":3227795,"name":"ack","f":3227795,"d_finished":0,"c":0,"l":3228827,"d":1032},{"a":3227780,"name":"processing","f":3227780,"d_finished":0,"c":0,"l":3228827,"d":1047},{"name":"ProduceResults","f":3227328,"d_finished":639,"c":2,"l":3228187,"d":639},{"a":3228195,"name":"Finish","f":3228195,"d_finished":0,"c":0,"l":3228827,"d":632}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.308855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:40.252251Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:40.308910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:40.309057Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-10T12:16:38.198967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:38.232358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:38.232612Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:38.238935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:38.239149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:38.239361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:38.239465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:38.239541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:38.239608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:38.239709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:38.239794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:38.239875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:38.239953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.240049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:38.240166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:38.240257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:38.264900Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:38.265251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:38.265309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:38.265457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.265600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:38.265672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:38.265719Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:38.265809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:38.265858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:38.265892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:38.265914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:38.266047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.266097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:38.266127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:38.266158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:38.266239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:38.266282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:38.266314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:38.266349Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:38.266387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:38.266417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:38.266439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:38.266479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:38.266506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:38.266525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:38.266700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:38.266839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:38.266878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:38.266976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:38.267006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.267027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.267071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:38.267112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:38.267134Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:38.267190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:38.267220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:38.267242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:38.267342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:38.267417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
KIND_SCHEMA;min=1768047399295;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136557899205664;op_tx=104:TX_KIND_SCHEMA;min=1768047399295;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1768047399295;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136764039460032;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-10T12:16:40.629397Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:16:40.629487Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047399295 at tablet 9437184, mediator 0 2026-01-10T12:16:40.629526Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-10T12:16:40.629755Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-10T12:16:40.641987Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-10T12:16:40.642593Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047399295:max} readable: {1768047399295:max} at tablet 9437184 2026-01-10T12:16:40.642753Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:40.650828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047399295:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:40.650970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047399295:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:40.651742Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047399295:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:40.653772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047399295:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:40.710114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047399295:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2026-01-10T12:16:40.711451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:40.711738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:40.712132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.712303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.712644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:40.712823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.713028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.713267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2026-01-10T12:16:40.713769Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":2968769,"name":"_full_task","f":2968769,"d_finished":0,"c":0,"l":2972061,"d":3292},"events":[{"name":"bootstrap","f":2969057,"d_finished":2020,"c":1,"l":2971077,"d":2020},{"a":2971337,"name":"ack","f":2971337,"d_finished":0,"c":0,"l":2972061,"d":724},{"a":2971318,"name":"processing","f":2971318,"d_finished":0,"c":0,"l":2972061,"d":743},{"name":"ProduceResults","f":2970694,"d_finished":770,"c":2,"l":2971806,"d":770},{"a":2971813,"name":"Finish","f":2971813,"d_finished":0,"c":0,"l":2972061,"d":248}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.713874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:40.714304Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":2968769,"name":"_full_task","f":2968769,"d_finished":0,"c":0,"l":2972653,"d":3884},"events":[{"name":"bootstrap","f":2969057,"d_finished":2020,"c":1,"l":2971077,"d":2020},{"a":2971337,"name":"ack","f":2971337,"d_finished":0,"c":0,"l":2972653,"d":1316},{"a":2971318,"name":"processing","f":2971318,"d_finished":0,"c":0,"l":2972653,"d":1335},{"name":"ProduceResults","f":2970694,"d_finished":770,"c":2,"l":2971806,"d":770},{"a":2971813,"name":"Finish","f":2971813,"d_finished":0,"c":0,"l":2972653,"d":840}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:40.714401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:40.653719Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:40.714445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:40.714585Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TColumnShardTestSchema::RebootExportWithLostAnswer >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin 
>> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TTopicApiDescribes::GetPartitionDescribe >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TestShred::ShredWithMerge >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:20.890799Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:16:20.894740Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:20.895172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:20.920215Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:20.920475Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:20.927871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:20.928118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:20.928367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:20.928487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:20.928564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:20.928666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:20.928763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:20.928849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:20.928924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:20.928991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.929075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:20.929152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:20.929252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:20.950024Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received 
event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:20.955504Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:20.955993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:20.956069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:20.956329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:20.956532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:20.956620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:20.956681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:20.956809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:20.956877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:20.956931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:20.956970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:20.957166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:20.957242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:20.957287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:20.957317Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:20.957412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:20.957464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:20.957543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:20.957582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:20.957657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:20.957707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:20.957738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:20.957788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:20.957826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:20.957922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:20.958157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:20.958223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:20.958263Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:20.958401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:20.958450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.958486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:20.958561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:20.958603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:20.958636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:20.958683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:20.958731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... =[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:16:46.271725Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:16:46.272125Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:46.272404Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.272472Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:46.272716Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:16:46.272803Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-10T12:16:46.273225Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-10T12:16:46.273494Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.273687Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.273926Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.274204Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:46.274440Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.274661Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.275086Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:485:2489] finished for tablet 9437184 2026-01-10T12:16:46.275812Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:484:2488];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":25864125,"name":"_full_task","f":25864125,"d_finished":0,"c":0,"l":25875322,"d":11197},"events":[{"name":"bootstrap","f":25864531,"d_finished":1710,"c":1,"l":25866241,"d":1710},{"a":25874305,"name":"ack","f":25872195,"d_finished":1926,"c":1,"l":25874121,"d":2943},{"a":25874275,"name":"processing","f":25866490,"d_finished":4565,"c":3,"l":25874126,"d":5612},{"name":"ProduceResults","f":25865591,"d_finished":3303,"c":6,"l":25874855,"d":3303},{"a":25874864,"name":"Finish","f":25874864,"d_finished":0,"c":0,"l":25875322,"d":458},{"name":"task_result","f":25866513,"d_finished":2556,"c":2,"l":25871886,"d":2556}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.275967Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:46.276680Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:484:2488];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":25864125,"name":"_full_task","f":25864125,"d_finished":0,"c":0,"l":25876168,"d":12043},"events":[{"name":"bootstrap","f":25864531,"d_finished":1710,"c":1,"l":25866241,"d":1710},{"a":25874305,"name":"ack","f":25872195,"d_finished":1926,"c":1,"l":25874121,"d":3789},{"a":25874275,"name":"processing","f":25866490,"d_finished":4565,"c":3,"l":25874126,"d":6458},{"name":"ProduceResults","f":25865591,"d_finished":3303,"c":6,"l":25874855,"d":3303},{"a":25874864,"name":"Finish","f":25874864,"d_finished":0,"c":0,"l":25876168,"d":1304},{"name":"task_result","f":25866513,"d_finished":2556,"c":2,"l":25871886,"d":2556}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:46.276812Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:46.261783Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-10T12:16:46.276877Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:46.277115Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] Test 
command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:35.608565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:35.608678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:35.608722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:35.608777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:35.608833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:35.608882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:35.608949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:35.609055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:35.610073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:35.610379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:35.831407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:35.831492Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:35.832541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:35.848506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:35.849042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:35.849231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:35.857461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:35.857813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:35.858657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2026-01-10T12:14:35.859243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:35.862336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:35.862548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:35.863962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:35.864036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:35.864117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:35.864172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:35.864225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:35.864443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:35.872038Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:36.021323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:36.021589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.021785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:36.021845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:36.022123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:36.022202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:36.025434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, 
txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:36.025672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:36.025931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.026004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:36.026070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:36.026113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:36.028343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.028423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:36.028473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:36.030350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.030417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.030472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.030542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:36.034985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:36.045204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:36.045427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:36.046655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:36.046826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:36.046890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.047212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:36.047290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.047487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:36.047596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:36.049893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 3 2026-01-10T12:16:46.511797Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-10T12:16:46.511839Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-10T12:16:46.511872Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-10T12:16:46.511902Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-10T12:16:46.511924Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-10T12:16:46.513728Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.513834Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.513876Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:46.513912Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:16:46.513951Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:16:46.515448Z node 32 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.515547Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.515587Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:46.515627Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:16:46.515666Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:16:46.517524Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.517625Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.517665Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:46.517701Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-10T12:16:46.517742Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:16:46.518346Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.518433Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:46.518465Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:46.518497Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-10T12:16:46.518532Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-10T12:16:46.518608Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:16:46.521763Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:16:46.521918Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:16:46.523119Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:16:46.523201Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:16:46.524428Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:16:46.524461Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:16:46.525643Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:16:46.525719Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.525744Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2681:4670] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:16:46.526635Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:16:46.526672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:16:46.526729Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:16:46.526749Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:16:46.526789Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:16:46.526808Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:16:46.526848Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:16:46.526865Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:16:46.526903Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:16:46.526921Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 
2026-01-10T12:16:46.528285Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:16:46.528354Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.528376Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2684:4673] 2026-01-10T12:16:46.528554Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:16:46.528654Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:16:46.528684Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.528704Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2684:4673] 2026-01-10T12:16:46.528776Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.528797Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2684:4673] 2026-01-10T12:16:46.528870Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-10T12:16:46.528942Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:16:46.528992Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.529012Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2684:4673] 2026-01-10T12:16:46.529083Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:16:46.529103Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2684:4673] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TestShred::ManualLaunch3Cycles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:29.299795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:29.299903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.299947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:29.299987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:29.307330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:29.307451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:29.307569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.307663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:29.308804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:29.309187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:29.465795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:29.465907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:29.466922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:29.482096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:29.482474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:29.482615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:29.491767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:29.492135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:29.492935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:29.493433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:29.496480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:29.496699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-01-10T12:14:29.498053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:29.498127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:29.498184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:29.498237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:29.498280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:29.498486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.505671Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:29.653882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:29.654174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.654411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:29.654471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:29.654773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:29.654862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:29.661090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:29.661375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:29.661612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.661677Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:29.661724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:29.661761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:29.664123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.664207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:29.664258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:29.666221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.666277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.666333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:29.666414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:29.670633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:29.672956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:29.673168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:29.674269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:29.674425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:29.674488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:29.674796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change 
state for txid 1:0 128 -> 240 2026-01-10T12:14:29.674857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:29.675040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:29.675134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:29.677361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 3 2026-01-10T12:16:47.967892Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-10T12:16:47.967926Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-10T12:16:47.967951Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-10T12:16:47.967970Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-10T12:16:47.967991Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-10T12:16:47.969411Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.969498Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.969529Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:47.969558Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:16:47.969594Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:16:47.970898Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.971003Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.971040Z node 32 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:47.971073Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:16:47.971115Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:16:47.972599Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.972672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.972699Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:47.972724Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-10T12:16:47.972760Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:16:47.973211Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.973290Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:16:47.973326Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:16:47.973360Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-10T12:16:47.973391Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-10T12:16:47.973458Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:16:47.976019Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:16:47.976315Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 
2026-01-10T12:16:47.978018Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:16:47.978107Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:16:47.979457Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:16:47.979503Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:16:47.981004Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:16:47.981101Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.981134Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2681:4670] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:16:47.982272Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:16:47.982307Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:16:47.982386Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:16:47.982415Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:16:47.982468Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:16:47.982490Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:16:47.982536Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:16:47.982560Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:16:47.982619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:16:47.982639Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-10T12:16:47.984782Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:16:47.984886Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.984934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2684:4673] 2026-01-10T12:16:47.985233Z node 
32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:16:47.985409Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:16:47.985465Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.985498Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2684:4673] 2026-01-10T12:16:47.985625Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.985662Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2684:4673] 2026-01-10T12:16:47.985789Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-10T12:16:47.985913Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:16:47.986001Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.986035Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2684:4673] 2026-01-10T12:16:47.986154Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:16:47.986188Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2684:4673] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser >> TestShred::ShredWithSplit >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:17.602184Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 
2026-01-10T12:16:17.606552Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:17.607059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:17.637181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:17.637604Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:17.646539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:17.646856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:17.647155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:17.647299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:17.647419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:17.647563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:17.647679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:17.647829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:17.647950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:17.648058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:17.648292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:17.648432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:17.648573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:17.677328Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:17.683332Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:17.683785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:17.683850Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:17.684122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:17.684327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:17.684461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:17.684516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:17.684651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:17.684733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:17.684799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:17.684857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:17.685082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:17.685329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:17.685376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:17.685410Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:17.685508Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:17.685576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:17.685629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:17.685661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:17.685722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:17.685762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:17.685830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:17.685909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:17.685959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:17.686184Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:17.686454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:17.686531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:17.686575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:17.686730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:17.686786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:17.686825Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:17.686881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:17.686928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:17.686959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:17.687010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:17.687058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... ExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:50.279414Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:16:50.279487Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:16:50.279650Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:50.279831Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.279875Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:16:50.280106Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:16:50.280172Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-10T12:16:50.280428Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:720:2700];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-10T12:16:50.280613Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.280728Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.280885Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.281067Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:50.281185Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.281264Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.281555Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:721:2701] finished for tablet 9437184 2026-01-10T12:16:50.282037Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:720:2700];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":33170948,"name":"_full_task","f":33170948,"d_finished":0,"c":0,"l":33182567,"d":11619},"events":[{"name":"bootstrap","f":33171200,"d_finished":1328,"c":1,"l":33172528,"d":1328},{"a":33181999,"name":"ack","f":33179138,"d_finished":2608,"c":2,"l":33181864,"d":3176},{"a":33181985,"name":"processing","f":33172681,"d_finished":5513,"c":5,"l":33181868,"d":6095},{"name":"ProduceResults","f":33172048,"d_finished":3559,"c":9,"l":33182230,"d":3559},{"a":33182234,"name":"Finish","f":33182234,"d_finished":0,"c":0,"l":33182567,"d":333},{"name":"task_result","f":33172704,"d_finished":2798,"c":3,"l":33178967,"d":2798}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.282165Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:720:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:50.282671Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:720:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":33170948,"name":"_full_task","f":33170948,"d_finished":0,"c":0,"l":33183176,"d":12228},"events":[{"name":"bootstrap","f":33171200,"d_finished":1328,"c":1,"l":33172528,"d":1328},{"a":33181999,"name":"ack","f":33179138,"d_finished":2608,"c":2,"l":33181864,"d":3785},{"a":33181985,"name":"processing","f":33172681,"d_finished":5513,"c":5,"l":33181868,"d":6704},{"name":"ProduceResults","f":33172048,"d_finished":3559,"c":9,"l":33182230,"d":3559},{"a":33182234,"name":"Finish","f":33182234,"d_finished":0,"c":0,"l":33183176,"d":942},{"name":"task_result","f":33172704,"d_finished":2798,"c":3,"l":33178967,"d":2798}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:50.282775Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:50.268060Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-10T12:16:50.282820Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:50.282981Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TestShred::Run3CyclesForTables >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser 
>> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> TestShred::SimpleTestForAllSupportedObjects >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter [GOOD] >> TestShred::Run3CyclesForTopics >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous >> Cdc::SupportedTypes [GOOD] >> Cdc::StringEscaping >> TestShred::ShredWithMerge [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2026-01-10T12:16:42.843651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705772923017403:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:42.843775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:42.868708Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705771888338259:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:42.871287Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:42.872709Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:16:42.878561Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:16:43.044616Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:43.048362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:43.078317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:43.078446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:43.079420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:43.079461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:43.109533Z node 
1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:16:43.111685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:43.112521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:43.180218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:43.230332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001640/r3tmp/yandex3q6s8z.tmp 2026-01-10T12:16:43.230370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001640/r3tmp/yandex3q6s8z.tmp 2026-01-10T12:16:43.230586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001640/r3tmp/yandex3q6s8z.tmp 2026-01-10T12:16:43.230788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:43.271034Z INFO: TTestServer started on Port 18015 GrpcPort 11774 2026-01-10T12:16:43.272420Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:43.347264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:11774 2026-01-10T12:16:43.450291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:43.522177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:43.849725Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:43.875839Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:45.714654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705785807920663:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.714654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705785807920672:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.714826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.715176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705785807920679:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.715243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.718833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:45.723691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705785807920714:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.723774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.725887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705785807920720:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.725986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:45.738902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705785807920678:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:16:46.035671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705785807920758:3094] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:46.066831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:46.068210Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593705784773240483:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:16:46.068513Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593705790102888080:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:16:46.068725Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NDNmMjMzODYtOTczZmE5ODUtZTNkMGU2ZjgtMzIyYjQ3MTQ=, ActorId: [2:7593705784773240458:2300], ActorState: ExecuteState, LegacyTraceId: 01kekxctpdb9pr0eck6zeg19pn, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:16:46.068939Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWFiYTc3NDUtYTBjMWNkZjEtODI5YjRlNjItY2VjYzE3Mjk=, ActorId: [1:7593705785807920661:2333], ActorState: ExecuteState, LegacyTraceId: 01kekxctkw27a8cd4n2qawt6yq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of ... 13 [1:7593705815872693519:2467] 2026-01-10T12:16:52.664474Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037898][Partition][11][StateInit] bootstrapping 11 [2:7593705814838012511:2392] 2026-01-10T12:16:52.666294Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2026-01-10T12:16:52.666330Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 2 2026-01-10T12:16:52.666485Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.666580Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.666906Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [2:7593705814838012470:2391] 2026-01-10T12:16:52.667065Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [2:7593705814838012473:2391] 2026-01-10T12:16:52.667176Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:16:52.667283Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.667556Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:4:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.667582Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7593705815872693447:2466] 2026-01-10T12:16:52.667715Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037899][Partition][4][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [2:7593705814838012483:2390] 2026-01-10T12:16:52.667753Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7593705815872693442:2466] 2026-01-10T12:16:52.668495Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:16:52.668518Z node 1 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-10T12:16:52.669208Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][8][StateInit] bootstrapping 8 [1:7593705815872693572:2468] 2026-01-10T12:16:52.671161Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.671230Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.671477Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [2:7593705814838012510:2392] 2026-01-10T12:16:52.671608Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [2:7593705814838012511:2392] 2026-01-10T12:16:52.671678Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.671712Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [1:7593705815872693573:2468] 2026-01-10T12:16:52.671757Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:16:52.671971Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [1:7593705815872693518:2467] 2026-01-10T12:16:52.672096Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [1:7593705815872693519:2467] 2026-01-10T12:16:52.672662Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 2, Generation 2 2026-01-10T12:16:52.672691Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 2, Generation 2 2026-01-10T12:16:52.672705Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2026-01-10T12:16:52.672719Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 2, Generation 2 2026-01-10T12:16:52.672732Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 1, Generation 2 2026-01-10T12:16:52.677938Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:16:52.678199Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:16:52.678388Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [1:7593705815872693572:2468] 2026-01-10T12:16:52.678783Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [1:7593705815872693573:2468] 2026-01-10T12:16:52.679236Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 1, Generation 2 2026-01-10T12:16:53.619513Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-10T12:16:53.619610Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2026-01-10T12:16:53.619645Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7593705820167660975:2484]: Bootstrap 2026-01-10T12:16:53.620032Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705820167660975:2484]: Request location 2026-01-10T12:16:53.620200Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660977:2485] connected; active server actors: 1 2026-01-10T12:16:53.620278Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-10T12:16:53.620319Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705820167660975:2484]: Got location 2026-01-10T12:16:53.620440Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660977:2485] disconnected. 
2026-01-10T12:16:53.620467Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660977:2485] disconnected; active server actors: 1 2026-01-10T12:16:53.620485Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660977:2485] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-01-10T12:16:53.621807Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-10T12:16:53.621876Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2026-01-10T12:16:53.621910Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7593705820167660979:2486]: Bootstrap 2026-01-10T12:16:53.622301Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7593705820167660979:2486]: Request location 2026-01-10T12:16:53.622438Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660982:2488] connected; active server actors: 1 2026-01-10T12:16:53.622506Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 1, Generation 2 2026-01-10T12:16:53.622528Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7593705820167660979:2486]: Got location 2026-01-10T12:16:53.622583Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660982:2488] disconnected. 
2026-01-10T12:16:53.622596Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660982:2488] disconnected; active server actors: 1 2026-01-10T12:16:53.622603Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7593705820167660982:2488] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1768047412 nanos: 651000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_location { node_id: 1 generation: 2 } } } } } 2026-01-10T12:16:53.624504Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-10T12:16:53.624578Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2026-01-10T12:16:53.624620Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7593705820167660986:2490]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithMerge [GOOD] Test command err: 2026-01-10T12:16:47.216298Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:60:2102] 2026-01-10T12:16:47.239285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:47.239353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:47.239389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:47.239425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:47.239452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:47.239478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:47.239511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:47.239567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:47.240491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:47.240693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:47.307410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:47.307500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:47.308277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:47.308386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:47.308563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:47.310384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:47.310640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:47.311170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:47.315721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:47.320743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:47.320940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:47.322070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:47.322131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:47.322354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:47.322409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:47.322456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:47.323529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:47.438566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:16:47.439279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.439945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:47.440007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
MeasureSelfResponseTime 2026-01-10T12:16:54.904124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.904215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.904346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.904379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.936225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.936302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.936451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.936486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.946963Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.947042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.947152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.947186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.979397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.979473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.979598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.979632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.990111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.990193Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:54.990305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:54.990338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.022483Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.022559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.022680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.022711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.033181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.033242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.033308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.033330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.065651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.065715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:55.065773Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.065793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:55.076620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1582:3400], Recipient [1:499:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1887 Memory: 90453 Storage: 10149823 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-10T12:16:55.076677Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:16:55.076716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.1887 2026-01-10T12:16:55.076835Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:16:55.076873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:16:55.087364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:16:55.087448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:16:55.087480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:16:55.087761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:16:55.087804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:16:55.087832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:16:55.087899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:16:55.087933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:16:55.087999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.998000s, Timestamp# 1970-01-01T00:01:10.002000Z 2026-01-10T12:16:55.088055Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-10T12:16:55.088678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:16:55.091676Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1896:3656], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:16:55.091735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:16:55.091778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:16:55.091968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:16:55.092029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:16:55.092099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] |90.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin |90.2%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] |90.2%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047956.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047956.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047956.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047956.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047956.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046756.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046756.000000s;Name=;Codec=}; 2026-01-10T12:15:56.780565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:56.807018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:56.807203Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:56.813143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:56.813368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:56.813541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:56.813635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2026-01-10T12:15:56.813710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:56.813783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:56.813845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:56.813930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:56.814045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:56.814127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.814192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:56.814252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:56.814316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:56.851400Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:56.851973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:56.852051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:56.852393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.852586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:56.852663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:56.852701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:56.852791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:56.852851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:56.852883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:56.852911Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:56.853044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.853092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:56.853122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:56.853143Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:56.853210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:56.853261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:56.853306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:56.853329Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:56.853371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:56.853399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:56.853421Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:15:56.853451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:56.853477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:56.853498Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:56.853634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:56.853669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:56.853691Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:56.853785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:56.853836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:1 ... shard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:55.880251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:55.880483Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047405102:max} readable: {1768047405102:max} at tablet 9437184 2026-01-10T12:16:55.880626Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:55.880806Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405102:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:55.880920Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405102:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:55.881479Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405102:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:55.883213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405102:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:55.884223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405102:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2026-01-10T12:16:55.884707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:55.884933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:55.885189Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.885396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.885812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:55.885953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.886103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.886346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2026-01-10T12:16:55.886864Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":63030105,"name":"_full_task","f":63030105,"d_finished":0,"c":0,"l":63032376,"d":2271},"events":[{"name":"bootstrap","f":63030379,"d_finished":1017,"c":1,"l":63031396,"d":1017},{"a":63031742,"name":"ack","f":63031742,"d_finished":0,"c":0,"l":63032376,"d":634},{"a":63031724,"name":"processing","f":63031724,"d_finished":0,"c":0,"l":63032376,"d":652},{"name":"ProduceResults","f":63031028,"d_finished":650,"c":2,"l":63032086,"d":650},{"a":63032093,"name":"Finish","f":63032093,"d_finished":0,"c":0,"l":63032376,"d":283}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.886955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:55.887433Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":63030105,"name":"_full_task","f":63030105,"d_finished":0,"c":0,"l":63032970,"d":2865},"events":[{"name":"bootstrap","f":63030379,"d_finished":1017,"c":1,"l":63031396,"d":1017},{"a":63031742,"name":"ack","f":63031742,"d_finished":0,"c":0,"l":63032970,"d":1228},{"a":63031724,"name":"processing","f":63031724,"d_finished":0,"c":0,"l":63032970,"d":1246},{"name":"ProduceResults","f":63031028,"d_finished":650,"c":2,"l":63032086,"d":650},{"a":63032093,"name":"Finish","f":63032093,"d_finished":0,"c":0,"l":63032970,"d":877}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2026-01-10T12:16:55.887542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:55.883181Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:55.887598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:55.887732Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier 
'/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047956.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047956.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047956.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047956.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047956.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148047956.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046756.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046756.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046756.000000s;Name=;Codec=}; 2026-01-10T12:15:56.786953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:15:56.819939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:15:56.820225Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:15:56.827761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:15:56.828009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:15:56.828271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:15:56.828411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:15:56.828514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:15:56.828645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:15:56.828753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:15:56.828879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:15:56.829006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:15:56.829116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.829215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:15:56.829323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:15:56.829438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:15:56.860202Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:15:56.860578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:15:56.860639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:15:56.860805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.860957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:15:56.861035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:15:56.861084Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:15:56.861153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:15:56.861236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:15:56.861281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:15:56.861310Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:15:56.861471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:15:56.861538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:15:56.861586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:15:56.861620Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:15:56.861716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:15:56.861788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:15:56.861834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:15:56.861868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:15:56.861920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:15:56.861962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:15:56.861996Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:15:56.862044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:15:56.862099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:15:56.862124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:15:56.862278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:15:56.862319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:15:56.862340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:15:56.862458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:15:56.862490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:15:56.862511Z nod ... 
ablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:16:55.875239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:16:55.875468Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047405108:max} readable: {1768047405108:max} at tablet 9437184 2026-01-10T12:16:55.875601Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:16:55.875782Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405108:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:55.875847Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405108:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:16:55.876390Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405108:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:16:55.878010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405108:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:16:55.878953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047405108:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2026-01-10T12:16:55.879404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:16:55.879632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:16:55.879865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.879998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.880343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:16:55.880455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.880555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.880730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2026-01-10T12:16:55.881061Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":63261480,"name":"_full_task","f":63261480,"d_finished":0,"c":0,"l":63263378,"d":1898},"events":[{"name":"bootstrap","f":63261730,"d_finished":889,"c":1,"l":63262619,"d":889},{"a":63262924,"name":"ack","f":63262924,"d_finished":0,"c":0,"l":63263378,"d":454},{"a":63262911,"name":"processing","f":63262911,"d_finished":0,"c":0,"l":63263378,"d":467},{"name":"ProduceResults","f":63262362,"d_finished":462,"c":2,"l":63263169,"d":462},{"a":63263173,"name":"Finish","f":63263173,"d_finished":0,"c":0,"l":63263378,"d":205}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:16:55.881113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:16:55.881408Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":63261480,"name":"_full_task","f":63261480,"d_finished":0,"c":0,"l":63263743,"d":2263},"events":[{"name":"bootstrap","f":63261730,"d_finished":889,"c":1,"l":63262619,"d":889},{"a":63262924,"name":"ack","f":63262924,"d_finished":0,"c":0,"l":63263743,"d":819},{"a":63262911,"name":"processing","f":63262911,"d_finished":0,"c":0,"l":63263743,"d":832},{"name":"ProduceResults","f":63262362,"d_finished":462,"c":2,"l":63263169,"d":462},{"a":63263173,"name":"Finish","f":63263173,"d_finished":0,"c":0,"l":63263743,"d":570}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2026-01-10T12:16:55.881479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:16:55.877981Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:16:55.881520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:16:55.881617Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous >> TestShred::ManualLaunch3Cycles [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC >> TestShred::SimpleTestForTables >> TestShred::ShredWithSplit [GOOD] >> TestShred::ShredWithCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithSplit [GOOD] Test command err: 2026-01-10T12:16:51.091577Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:60:2102] 2026-01-10T12:16:51.122692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:51.122787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:51.122839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-01-10T12:16:51.122878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:51.122927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:51.122961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:51.123012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:51.123090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:51.123886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:51.124235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:51.215562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:51.215628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:51.216566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:51.216690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:51.216889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:51.218915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:51.219179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:51.219808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:51.226114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:51.231596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:51.231818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:51.233151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:51.233210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:51.233423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:51.233472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:51.233511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:51.234579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:51.367409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.368968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:51.369544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... ::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.683978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.715967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.716027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.716157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.716178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.726589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.726647Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.726706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.726725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.758587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.758663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.758737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.758757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.769067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.769125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.769192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.769210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, 
processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.801051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.801108Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:16:57.801243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.801266Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:16:57.811975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1377:3244], Recipient [1:499:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 989 Memory: 89229 Storage: 5024539 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-10T12:16:57.812027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:16:57.812089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.0989 2026-01-10T12:16:57.812183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:16:57.812211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:16:57.812374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1380:3246], Recipient [1:499:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 
TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 925 Memory: 89253 Storage: 5127032 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-10T12:16:57.812394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:16:57.812414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.0925 2026-01-10T12:16:57.812465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:16:57.822792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:16:57.822849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:16:57.822870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:16:57.823046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:16:57.823083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:16:57.823111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:16:57.823178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:16:57.823208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:16:57.823271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2026-01-10T12:16:57.823304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-10T12:16:57.823762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:16:57.825838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1851:3616], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:16:57.825881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:16:57.825913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:16:57.826046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:16:57.826106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:16:57.826142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system >> TestShred::Run3CyclesForTables [GOOD] >> TestShred::Run3CyclesForAllSupportedObjects >> Cdc::StringEscaping [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestPQSmallRead >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TestShred::SimpleTestForAllSupportedObjects [GOOD] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:59.294900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:59.295018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:59.295058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:59.295111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:59.295149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:59.295182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:59.295252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:59.295332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:59.296204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:59.296507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:59.373104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:59.373152Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:59.383711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:59.383826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:59.383973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:59.392101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:59.392311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:59.392840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:59.393055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-10T12:16:59.395281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:59.395459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:59.396628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:59.396684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:59.396766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:59.396813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:59.396858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:59.397135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:59.511055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.511935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.512742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 4Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:00.438207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:00.438250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-10T12:17:00.438289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-10T12:17:00.440007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-10T12:17:00.440056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-10T12:17:00.440165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:17:00.440211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:00.440243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:17:00.440265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:00.440291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:17:00.440357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to 
actorId: [1:128:2153] message: TxId: 281474976710760 2026-01-10T12:17:00.440394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:00.440436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:17:00.440461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:17:00.440517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:17:00.441912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:17:00.441963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:17:00.442023Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-10T12:17:00.442123Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:799:2769], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:17:00.443530Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-10T12:17:00.443632Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:799:2769], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 
5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:17:00.443689Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:17:00.445099Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-10T12:17:00.445221Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:799:2769], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:17:00.445260Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-10T12:17:00.445378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:00.445414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:890:2849] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:00.445925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:00.446165Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 258us result status StatusSuccess 2026-01-10T12:17:00.446625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:53.535938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-01-10T12:16:53.536041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:53.536102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:53.536146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:53.536184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:53.536218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:53.536300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:53.536367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:53.537256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:53.537538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:53.629435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:53.629511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:53.645313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:53.645455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:53.645664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:53.654067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:53.654319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:53.655089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:53.655334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:53.657695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:53.657886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:53.659115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:53.659174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:53.659243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:53.659290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:53.659332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:53.659612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:53.815502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.816539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.816677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.816765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.816876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.816944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:16:53.817031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:53.817693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:16:59.771565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:16:59.771966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3110:4723], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3111:4724] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:16:59.772060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:16:59.772115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-10T12:16:59.772283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-10T12:16:59.772321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:16:59.772358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:16:59.772413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:16:59.772451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:16:59.772504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:16:59.772562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:00.343243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[1:1437:3298]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.343520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.343543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.343592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1437:3298], Recipient [1:1437:3298]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.343614Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.343661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.343681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.395363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:00.395439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:00.395484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:00.395653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-10T12:17:00.395678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:00.395702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:00.395754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:00.395784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:17:00.395826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 
2026-01-10T12:17:00.395875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:00.885845Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1437:3298]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.885926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.886008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.886038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.886100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.886128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:00.886211Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.886244Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.886328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.886362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.886430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1437:3298], Recipient [1:1437:3298]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.886458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:00.938149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:00.938213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:00.938235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:00.938416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:17:00.938464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event 
TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:00.938498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:00.938566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:00.938601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:17:00.938685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.919000s, Timestamp# 1970-01-01T00:00:05.220000Z 2026-01-10T12:17:00.938732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-10T12:17:00.940959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:00.941461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3130:4743], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:00.941501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:00.941528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:00.941636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:685:2675], Recipient [1:702:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:00.941658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:00.941684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] >> TestShred::SimpleTestForTables [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:58.129129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-01-10T12:16:58.129187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:58.129216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:58.129258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:58.129283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:58.129302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:58.129351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:58.129400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:58.130015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:58.130207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:58.188965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:58.189012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:58.199465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:58.199545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:58.199676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:58.205888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:58.206061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:58.206566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:58.206721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:58.208364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:58.208509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:58.209337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:58.209382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:58.209430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:58.209463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:58.209490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:58.209670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:58.313204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.316840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:16:58.317387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.317881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:17:01.888825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:01.889244Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2766:4443], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2767:4444] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:17:01.889285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:17:01.889315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-10T12:17:01.889499Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-10T12:17:01.889546Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:01.889583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:01.889644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:01.889680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:17:01.889730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:01.889786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:02.325633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325714Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[1:1313:3200]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325870Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.325939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.325967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.326035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1313:3200], Recipient [1:1313:3200]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.326054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.326099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.326126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.367069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:02.367137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:02.367175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:02.367441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-10T12:17:02.367482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:02.367507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:02.367559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:02.367604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:17:02.367644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 
2026-01-10T12:17:02.367674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:02.730453Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1313:3200]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:02.730708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.730726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.730775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1313:3200], Recipient [1:1313:3200]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.730789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.730819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:871:2822], Recipient [1:871:2822]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.730833Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:02.771771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:02.771837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:02.771858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:02.772018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:17:02.772047Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event 
TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:02.772071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:02.772139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:02.772163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:17:02.772228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.937000s, Timestamp# 1970-01-01T00:00:05.202000Z 2026-01-10T12:17:02.772276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-10T12:17:02.774039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:02.774511Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2790:4467], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:02.774550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:02.774576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:02.774643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:685:2675], Recipient [1:702:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:02.774665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:02.774688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 10911, MsgBus: 32739 2026-01-10T12:13:58.369872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705065344273236:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:58.371479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:58.829846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:58.943150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:58.943269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:13:58.952175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:59.041638Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705065344273201:2081] 1768047238358277 != 1768047238358280 2026-01-10T12:13:59.049023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:59.062734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:59.215717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:59.215746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:59.215764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:59.215940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:59.416247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:59.964040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:59.975201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:14:00.071292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.286690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.539098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:00.648613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.119807Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705086819111558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.119966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.120360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705086819111568:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.120438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.369238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705065344273236:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:03.369325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:03.474257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.522388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.570208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.621826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.666724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.710936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.754751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.847667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.000598Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705086819112440:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.000721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.001322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705091114079741:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.001377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705091114079742:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.001537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.006799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:04.032353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705091114079745:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:14:04.090447Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705091114079796:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:05.945809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:16.403953Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:16.476655Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:16.478002Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593705659423079328:2081] 1768047376386421 != 1768047376386424 2026-01-10T12:16:16.517103Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:16.517217Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:16.523232Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:16.564297Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:16.564328Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:16.564340Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:16.564456Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:16.584163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:17.154508Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:17.174516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.245820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.414814Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:17.495474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:17.585591Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.154600Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705680897917689:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.154713Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.154987Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705680897917698:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.155048Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.260822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.294881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.328031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.360238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.392174Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593705659423079357:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:21.392264Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:21.419054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.454957Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.489985Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.545800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:21.630566Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705680897918573:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.630658Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705680897918578:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.630657Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.630835Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593705680897918580:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.630895Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:21.634741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:21.647056Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593705680897918581:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:16:21.736061Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593705680897918633:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:24.084137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:31.459682Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:16:31.459714Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser >> TestShred::Run3CyclesForTopics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:31.470834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:31.470924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:31.470968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:31.471006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:31.471049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:31.471102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:31.471155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:14:31.471250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:31.473333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:31.473658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:31.650785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:31.650865Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:31.651869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:31.678159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:31.678653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:31.678863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:31.699522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:31.699866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:31.700709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.701199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:31.709057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:31.709286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:31.710684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:31.710752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:31.710820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:31.710873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:31.710921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:31.711107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.718578Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:31.867461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:31.867749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.867965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:31.868021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:31.868293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:31.868381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:31.871936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.872179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:31.872406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.872474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:31.872552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:31.872619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:31.875681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.875757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:31.875795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:31.878355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-10T12:14:31.878417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:31.878467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.878527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:31.882539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:31.885274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:31.885480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:31.886587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:31.886728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:31.886796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.887119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:31.887181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:31.887354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:31.887480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:31.890324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-10T12:17:02.487069Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-10T12:17:02.487094Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-10T12:17:02.487122Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-10T12:17:02.487145Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-10T12:17:02.487165Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-10T12:17:02.488470Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.488552Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.488591Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:02.488622Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:17:02.488656Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:17:02.489876Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.489945Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.489972Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:02.489997Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:17:02.490025Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:17:02.491353Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-10T12:17:02.491421Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.491447Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:02.491473Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-10T12:17:02.491502Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:17:02.491931Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.491992Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:02.492015Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:02.492039Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-10T12:17:02.492066Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-10T12:17:02.492146Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:17:02.494391Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:02.494536Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:02.495809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:02.495908Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:17:02.497185Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:17:02.497222Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:17:02.498522Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:17:02.498613Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:17:02.498654Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2681:4670] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:17:02.499744Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:17:02.499778Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:17:02.499842Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:17:02.499863Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:17:02.499909Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:17:02.499934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:17:02.499979Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:17:02.500001Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:17:02.500046Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:17:02.500068Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-10T12:17:02.501747Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:17:02.501843Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:17:02.501884Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2684:4673] 2026-01-10T12:17:02.502138Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:17:02.502247Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:17:02.502283Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:17:02.502314Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2684:4673] 2026-01-10T12:17:02.502436Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 
2026-01-10T12:17:02.502460Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2684:4673] 2026-01-10T12:17:02.502544Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-10T12:17:02.502654Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:17:02.502727Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:17:02.502759Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2684:4673] 2026-01-10T12:17:02.502876Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:17:02.502906Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2684:4673] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:47.975604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:47.975702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:47.975743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:47.975780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:47.975815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:47.975894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:47.975949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:47.976021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:47.976876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:47.977179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:48.064670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:48.064738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:48.080994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:48.081116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:48.081335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:48.090786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:48.091077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:48.091777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:48.092117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:48.094799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:48.095045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:48.096344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:48.096404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:48.096483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:48.096530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:48.096569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:48.096886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:48.204761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:16:48.205665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.205778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.205827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.205896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.205953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.205999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:48.206456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... .cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 19 ms, next wakeup# 593.981000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-10T12:17:02.857015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2026-01-10T12:17:02.857823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2026-01-10T12:17:02.858677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:17:02.858718Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:02.858878Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-10T12:17:02.858911Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:02.858936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:02.858974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:02.859020Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:17:02.859074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:02.859117Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:03.510175Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510257Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510309Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510326Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510398Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510431Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:03.510500Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.510535Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.510594Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.510610Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.510654Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1257:3139], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.510679Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:03.520996Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:03.521055Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:03.521079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:03.521226Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-10T12:17:03.521247Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:03.521266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:03.521323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:03.521354Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:17:03.521394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:03.521424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:04.185905Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.185989Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.186069Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.186112Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.186190Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.186220Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.186287Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1257:3139], Recipient [2:1257:3139]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.186317Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.186400Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.186428Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.186488Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.186517Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.196947Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.197032Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.197067Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:04.197311Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-10T12:17:04.197351Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:04.197384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:04.197459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:04.197492Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:17:04.197555Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.980000s, Timestamp# 1970-01-01T00:00:11.158000Z 2026-01-10T12:17:04.197590Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-10T12:17:04.199806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:04.200493Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:2254:4050], Recipient [2:703:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:04.200564Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:04.200616Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:04.200792Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:685:2674], Recipient [2:703:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:04.200835Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:04.200875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:49.573974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:49.574040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:49.574074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:49.574104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:49.574135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:49.574179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:49.574234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:49.574287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:49.574982Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:49.575181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:49.640781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:49.640834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:49.652962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:49.653048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:49.653205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:49.661158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:49.661416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:49.662166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:49.662391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:49.664797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:49.664962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:49.665817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:49.665859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:49.665906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:49.665939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:49.665968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:49.666176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:49.771282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.775739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.775878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.775944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:49.776513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nt [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 100 Completed: false Progress10k: 0 2026-01-10T12:17:03.682667Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:03.682719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:03.682788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:03.682841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:646: TTxCompleteShredBSC Unknown generation#100, Expected gen# 52 at schemestard: 72057594046678944 2026-01-10T12:17:03.682927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-10T12:17:03.683271Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 0 2026-01-10T12:17:03.683302Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:03.683325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:03.683356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:03.683380Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:17:03.684699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:03.684768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:03.684840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:04.192326Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.192398Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.192474Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.192501Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.202924Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.202994Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.203064Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.203093Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.203189Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.203221Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.203296Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1569:3430], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.203323Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.265424Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.265490Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.265531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-10T12:17:04.265766Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 5000 2026-01-10T12:17:04.265792Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:04.265815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:04.265870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:04.265903Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:17:04.265950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:04.266003Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:04.642554Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.642633Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.642721Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.642752Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.653240Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.653334Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.653417Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.653448Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:04.653531Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.653566Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.653647Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1569:3430], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.653677Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:04.716391Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.716476Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:04.716508Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-10T12:17:04.716745Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: true Progress10k: 10000 2026-01-10T12:17:04.716785Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:04.716817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:04.716889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:04.716925Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 
2026-01-10T12:17:04.716977Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 101, duration# 2 s 2026-01-10T12:17:04.719192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:04.719848Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4806:6090], Recipient [2:703:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:04.719920Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:04.719974Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:04.720145Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:3961:5415], Recipient [2:703:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:04.720190Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:04.720231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] Test command err: 2026-01-10T12:16:58.082698Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:60:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:60:2102] 2026-01-10T12:16:58.107903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:58.107980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:58.108031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-01-10T12:16:58.108066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:58.108140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:58.108176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:58.108231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:58.108286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:58.108880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:58.109063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:58.168164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:58.168220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:58.168890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:58.168975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:58.169095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:58.170534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:58.170782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:58.171402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:58.177621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:58.181299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:58.181434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:58.182330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:58.182384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:58.182530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:58.182568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:58.182595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:58.183268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:58.291206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.291855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.291959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:58.292617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... e_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2026-01-10T12:17:05.646011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2026-01-10T12:17:05.646051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2026-01-10T12:17:05.646082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-10T12:17:05.646154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-01-10T12:17:05.656897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:17:05.656996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:17:05.657035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2026-01-10T12:17:05.678802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.678891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.678997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.679026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.689545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.689637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.689762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.689804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.722034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, 
received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.722104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.722184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.722225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.732718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.732793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.732875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.732898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.765400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.765497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.765662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.765708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.776278Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.776369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.776456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.776480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.808949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.809034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2026-01-10T12:17:05.809158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.809191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.819675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.819755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.819843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:499:2462], Recipient [1:499:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.819868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.852029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.852123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:05.852210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.852232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:05.862708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:05.862785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:05.862817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:17:05.863042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:17:05.863078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:05.863111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:05.863191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:05.863226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:17:05.863295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2026-01-10T12:17:05.863334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 35 s 2026-01-10T12:17:05.863941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:05.866774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2109:3822], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:05.866832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:05.866863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:05.867017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:05.867049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:05.867107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:27.592659Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:16:27.598348Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:27.598880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:27.635457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:27.635736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:27.643872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:27.644167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:27.644435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:27.644562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:27.644688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:27.644801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:27.644935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:27.645098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:27.645222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:27.645341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:27.645496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:27.645614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:27.645792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:27.675677Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:27.680608Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:27.680993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:27.681068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:27.681311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:27.681510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:27.681603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:27.681653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:27.681776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:27.681852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:27.681908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:27.681951Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:27.682149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:27.682229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:27.682275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:27.682330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:27.682437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:27.682505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:27.682549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:27.682605Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:27.682666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:27.682705Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:27.682743Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:27.682813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:27.682856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:27.682949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:27.683191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:27.683255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:27.683296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:27.683444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:27.683495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:27.683532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:27.683583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:27.683624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:27.683654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:27.683731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:27.683793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
s;result=1;count=1000;finished=1; 2026-01-10T12:17:06.306616Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:17:06.306642Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:17:06.306889Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:06.307083Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.307126Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:06.307300Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:17:06.307375Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-10T12:17:06.307631Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:616:2595];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-10T12:17:06.307797Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.307962Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.308170Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.308405Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:06.308598Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.308741Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.308976Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:617:2596] finished for tablet 9437184 2026-01-10T12:17:06.309470Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:616:2595];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":39235222,"name":"_full_task","f":39235222,"d_finished":0,"c":0,"l":39242637,"d":7415},"events":[{"name":"bootstrap","f":39235438,"d_finished":1172,"c":1,"l":39236610,"d":1172},{"a":39241968,"name":"ack","f":39240455,"d_finished":1346,"c":1,"l":39241801,"d":2015},{"a":39241954,"name":"processing","f":39236746,"d_finished":3043,"c":3,"l":39241804,"d":3726},{"name":"ProduceResults","f":39236185,"d_finished":2218,"c":6,"l":39242350,"d":2218},{"a":39242355,"name":"Finish","f":39242355,"d_finished":0,"c":0,"l":39242637,"d":282},{"name":"task_result","f":39236761,"d_finished":1641,"c":2,"l":39240244,"d":1641}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.309614Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:616:2595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:06.310036Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:616:2595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":39235222,"name":"_full_task","f":39235222,"d_finished":0,"c":0,"l":39243244,"d":8022},"events":[{"name":"bootstrap","f":39235438,"d_finished":1172,"c":1,"l":39236610,"d":1172},{"a":39241968,"name":"ack","f":39240455,"d_finished":1346,"c":1,"l":39241801,"d":2622},{"a":39241954,"name":"processing","f":39236746,"d_finished":3043,"c":3,"l":39241804,"d":4333},{"name":"ProduceResults","f":39236185,"d_finished":2218,"c":6,"l":39242350,"d":2218},{"a":39242355,"name":"Finish","f":39242355,"d_finished":0,"c":0,"l":39243244,"d":889},{"name":"task_result","f":39236761,"d_finished":1641,"c":2,"l":39240244,"d":1641}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:06.310099Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:06.300108Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-10T12:17:06.310141Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:06.310278Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 
is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:05.814727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:05.814807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:05.814839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:05.814885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:05.814919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:05.814939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:05.814987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:05.815040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:05.815670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:05.815868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:05.875567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:05.875616Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:05.886822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:05.886935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:05.887069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:05.893965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:05.894147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:05.894675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:05.894860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2026-01-10T12:17:05.896980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:05.897182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:05.898033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:05.898075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:05.898123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:05.898155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:05.898188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:05.898419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:05.996543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:05.997993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:06.757240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:06.757342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:06.757375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:17:06.757414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:17:06.757469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:17:06.757565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:17:06.759735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:17:06.782431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 859 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:06.782480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: 
TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:06.782604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 859 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:06.782703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 859 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:17:06.783595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:06.783630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:06.783746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:06.783795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:06.783880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:06.783945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:06.783979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:06.784015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:06.784048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:06.786125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2026-01-10T12:17:06.786401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:06.786591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:06.786651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:06.786727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:06.786754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:06.786815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:06.786848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:06.786872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:06.786914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2755] message: TxId: 102 2026-01-10T12:17:06.786954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:06.786985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:06.787008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:06.787088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:06.788476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:06.788527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:874:2833] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:06.788936Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:06.789157Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 197us result status StatusSuccess 2026-01-10T12:17:06.789530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty 
Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:07.013534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, 
compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:07.013627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:07.013664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:07.013699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:07.013733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:07.013755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:07.013805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:07.013875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:07.014624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:07.014833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:07.088503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:07.088559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:07.099633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:07.099732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:07.099878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:07.107057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:07.107230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:07.107969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:07.108233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:07.110581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:07.110762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:07.111921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:07.111981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:07.112051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:07.112118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:07.112170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:07.112461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.216037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.216742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.216823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.216882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.216927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.216967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:17:07.217392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.217435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... xId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:07.711520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:17:07.711684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.711747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:07.711797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:07.711856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:07.713586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.713629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:07.713657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:07.714911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.714944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.714978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:07.715027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:07.715174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:07.716426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:07.716533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:07.716795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:07.716887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:07.716960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:07.717186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:07.717231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:07.717356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:07.717407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:07.718894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:07.718930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:07.719096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:07.719137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:07.719375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.719409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:07.719497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:07.719524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:07.719562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:07.719599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:07.719682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:07.719718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:07.719757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:07.719788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:07.719839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:07.719872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:07.719893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:07.720538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:07.720633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:07.720660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:07.720688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:07.720724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:07.720808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:07.723438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:07.723783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:07.724880Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:17:07.725775Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache 
[1:685:2675]) 2026-01-10T12:17:07.728543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:07.728879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.729053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2026-01-10T12:17:07.729517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-10T12:17:07.730281Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:07.733791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:07.734050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:07.734582Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:38.794973Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:16:38.798916Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:38.799278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:38.821038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:38.821262Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:38.827392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:38.827606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:38.827806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:38.827904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:38.827984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:38.828093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:38.828174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:38.828262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:38.828332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:38.828401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.828480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:38.828594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:38.828696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:38.846211Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:38.849535Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:38.849795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:38.849840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:38.850020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.850187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:38.850271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:38.850310Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:38.850412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:38.850484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:38.850524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:38.850557Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:38.850696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.850748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:38.850774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:38.850792Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:38.850857Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:38.850900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:38.850937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:38.850960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:38.851000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:38.851049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:38.851079Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:38.851120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:38.851150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:38.851211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:38.851367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:38.851415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:38.851448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:38.851534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:38.851561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.851587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.851624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:38.851664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:38.851683Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:38.851714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:38.851740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... ExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:07.690766Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:17:07.690824Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:17:07.691056Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:07.691279Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.691341Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:07.691557Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:17:07.691639Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-10T12:17:07.691945Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-10T12:17:07.692190Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.692351Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.692554Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.692754Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:07.692879Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.692986Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.693308Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-10T12:17:07.693807Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":29312602,"name":"_full_task","f":29312602,"d_finished":0,"c":0,"l":29325078,"d":12476},"events":[{"name":"bootstrap","f":29312831,"d_finished":1046,"c":1,"l":29313877,"d":1046},{"a":29324430,"name":"ack","f":29321065,"d_finished":3058,"c":2,"l":29324287,"d":3706},{"a":29324413,"name":"processing","f":29314009,"d_finished":5803,"c":5,"l":29324291,"d":6468},{"name":"ProduceResults","f":29313500,"d_finished":3991,"c":9,"l":29324701,"d":3991},{"a":29324707,"name":"Finish","f":29324707,"d_finished":0,"c":0,"l":29325078,"d":371},{"name":"task_result","f":29314028,"d_finished":2637,"c":3,"l":29320818,"d":2637}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.693903Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:07.694472Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":29312602,"name":"_full_task","f":29312602,"d_finished":0,"c":0,"l":29325655,"d":13053},"events":[{"name":"bootstrap","f":29312831,"d_finished":1046,"c":1,"l":29313877,"d":1046},{"a":29324430,"name":"ack","f":29321065,"d_finished":3058,"c":2,"l":29324287,"d":4283},{"a":29324413,"name":"processing","f":29314009,"d_finished":5803,"c":5,"l":29324291,"d":7045},{"name":"ProduceResults","f":29313500,"d_finished":3991,"c":9,"l":29324701,"d":3991},{"a":29324707,"name":"Finish","f":29324707,"d_finished":0,"c":0,"l":29325655,"d":948},{"name":"task_result","f":29314028,"d_finished":2637,"c":3,"l":29320818,"d":2637}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:07.694598Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:07.679073Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-10T12:17:07.694655Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:07.694827Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:08.040925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:08.041030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:08.041064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:08.041108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:08.041144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:08.041167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:08.041231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:08.041301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:08.042081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:08.042342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:08.112593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:08.112635Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:08.123579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:08.123676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:08.123833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:08.131218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:08.131422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:08.132143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:08.132363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:08.134521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:08.134706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:08.135561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:08.135604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:08.135648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:08.135679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:08.135710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:08.135929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:08.239009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239856Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.239936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:08.240468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... HARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:08.697785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:08.697886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:08.698965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:08.699046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:08.699276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:08.699349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:08.699376Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:08.699588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:08.699629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:08.699778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:08.699835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:08.701043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:08.701089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:08.701218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:08.701251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:17:08.701315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:08.701342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:08.701428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:08.701456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:08.701480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:08.701502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:08.701534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:08.701563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:08.701601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:08.701626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:08.701668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:08.701703Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:08.701771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:08.702236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:08.702333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:08.702365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:08.702389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:08.702417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:08.702470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:08.704506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:08.704867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1768047428.705775 283495 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-10T12:17:08.706130Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2666] Bootstrap 2026-01-10T12:17:08.706919Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2666] Become StateWork (SchemeCache [1:681:2671]) 2026-01-10T12:17:08.708985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:08.709258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:08.709544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose 
status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-10T12:17:08.710179Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2666] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:08.712275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:08.712467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-10T12:17:08.712901Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1768047428.713256 283495 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-10T12:17:08.715049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:08.715246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:08.715403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2026-01-10T12:17:08.716740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:08.716845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:52.376642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:52.376712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:52.376742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:52.376773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:52.376802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:52.376841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:52.376885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:52.376945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:52.377608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:52.377809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:52.455408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:52.455469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:52.468128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:52.468226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:52.468379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:52.474449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:52.474643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:52.475185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-10T12:16:52.475340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:52.477115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:52.477284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:52.478112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:52.478154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:52.478204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:52.478236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:52.478264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:52.478457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:52.589058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.589791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.589931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:52.590796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... .cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 77 ms, next wakeup# 593.923000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-10T12:17:08.447450Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2026-01-10T12:17:08.449687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2026-01-10T12:17:08.450563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:17:08.450604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:08.450794Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-10T12:17:08.450838Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:08.450868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:08.450916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:08.450948Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:17:08.450996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:08.451044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:08.985330Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985397Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985452Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985472Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985506Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985523Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:08.985573Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1569:3430], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:08.985594Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:08.985649Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:08.985667Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:08.985700Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:08.985717Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.006514Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:09.006598Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:09.006632Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:09.006868Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-10T12:17:09.006910Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:09.006940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:09.007038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:09.007085Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:17:09.007150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:17:09.007204Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:17:09.504721Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504788Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504853Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504885Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504921Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504938Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:17:09.504985Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:703:2686], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.505007Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.505056Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:865:2818], Recipient [2:865:2818]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.505074Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.505121Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1569:3430], Recipient [2:1569:3430]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.505137Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:17:09.525916Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:703:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:09.525989Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:17:09.526012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-10T12:17:09.526185Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:705:2687], Recipient [2:703:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-10T12:17:09.526210Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:17:09.526231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:17:09.526286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:17:09.526308Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:17:09.526344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.921000s, Timestamp# 1970-01-01T00:00:11.217000Z 2026-01-10T12:17:09.526369Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-10T12:17:09.528132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:17:09.528689Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4848:6132], Recipient [2:703:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:09.528746Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:17:09.528777Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:17:09.528897Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:685:2674], Recipient [2:703:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:17:09.528924Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:17:09.528955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:31.161925Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:16:31.166491Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:31.166977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:31.201009Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:31.201247Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:31.208802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:31.209060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:31.209298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:31.209440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:31.209554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:31.209658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:31.209764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:31.209902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:31.210011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:31.210118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.210236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:31.210364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:31.210503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:31.237138Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:31.241287Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:31.241632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:31.241708Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:31.241932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:31.242113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:31.242197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:31.242237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:31.242350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:31.242412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:31.242460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:31.242499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:31.242688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:31.242756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:31.242796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:31.242825Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:31.242919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:31.242973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:31.243012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:31.243045Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:31.243126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:31.243188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:31.243225Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
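A note on reading the TX_COLUMNSHARD entries in this dump: after the timestamp and severity, each entry carries a semicolon-delimited key=value tail (tablet_id=...;process=...;fline=...;event=...;description=...;). Below is a minimal Python sketch, assuming only that layout; the sample strings are copied from the normalizer entries above, and the helper name parse_fields is made up for illustration, not part of the YDB tooling.

# Example tails copied from the TTxUpdateSchema::Execute output above,
# reduced to the semicolon-delimited part that carries the event fields.
lines = [
    "fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;",
    "fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;",
    "fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;",
]

def parse_fields(tail: str) -> dict:
    """Split a 'key=value;key=value;' tail into a dict.

    Values may themselves contain '=' (e.g. description=CLASS_NAME=Chunks),
    so each field is split only on the first '='.
    """
    fields = {}
    for part in tail.strip().strip(";").split(";"):
        if "=" in part:
            key, value = part.split("=", 1)
            fields[key] = value
    return fields

for line in lines:
    f = parse_fields(line)
    print(f.get("event"), "->", f.get("type") or f.get("description"))

Run as-is, this prints one "event -> normalizer" pair per entry (e.g. "normalizer_switched -> CLASS_NAME=Chunks", "normalizer_init -> TablesCleaner"), which is enough to grep the normalizer chain out of dumps like the one continued below.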
2026-01-10T12:16:31.243272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:31.243307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:31.243394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:31.243641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:31.243713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:31.243753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:31.243894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:31.243938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.243972Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.244024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:31.244063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:31.244114Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:31.244162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:31.244206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
;result=1;count=1000;finished=1; 2026-01-10T12:17:09.559683Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:17:09.559726Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:17:09.559993Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:09.560217Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.560269Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:09.560504Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:17:09.560596Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-10T12:17:09.560944Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-10T12:17:09.561178Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.561329Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.561490Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.561681Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:09.561868Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.562029Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.562306Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:759:2739] finished for tablet 9437184 2026-01-10T12:17:09.562919Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:758:2738];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":38818600,"name":"_full_task","f":38818600,"d_finished":0,"c":0,"l":38826881,"d":8281},"events":[{"name":"bootstrap","f":38818832,"d_finished":1288,"c":1,"l":38820120,"d":1288},{"a":38826168,"name":"ack","f":38824477,"d_finished":1567,"c":1,"l":38826044,"d":2280},{"a":38826154,"name":"processing","f":38820303,"d_finished":3555,"c":3,"l":38826049,"d":4282},{"name":"ProduceResults","f":38819661,"d_finished":2678,"c":6,"l":38826568,"d":2678},{"a":38826572,"name":"Finish","f":38826572,"d_finished":0,"c":0,"l":38826881,"d":309},{"name":"task_result","f":38820323,"d_finished":1926,"c":2,"l":38824259,"d":1926}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.563000Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:09.563603Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:758:2738];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":38818600,"name":"_full_task","f":38818600,"d_finished":0,"c":0,"l":38827572,"d":8972},"events":[{"name":"bootstrap","f":38818832,"d_finished":1288,"c":1,"l":38820120,"d":1288},{"a":38826168,"name":"ack","f":38824477,"d_finished":1567,"c":1,"l":38826044,"d":2971},{"a":38826154,"name":"processing","f":38820303,"d_finished":3555,"c":3,"l":38826049,"d":4973},{"name":"ProduceResults","f":38819661,"d_finished":2678,"c":6,"l":38826568,"d":2678},{"a":38826572,"name":"Finish","f":38826572,"d_finished":0,"c":0,"l":38827572,"d":1000},{"name":"task_result","f":38820323,"d_finished":1926,"c":2,"l":38824259,"d":1926}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:09.563715Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:09.552050Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59288;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59288;selected_rows=0; 2026-01-10T12:17:09.601505Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:09.601900Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] 
recipient: [1:112:2143] 2026-01-10T12:17:09.431713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:09.431797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.431838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:09.431883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:09.431915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:09.431942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:09.432016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.432109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:09.432917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:09.433177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:09.522397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:09.522443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:09.537108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:09.537207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:09.537380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:09.545076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:09.545261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:09.545738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:09.545900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:09.547644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.547796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:09.548721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:09.548768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.548815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:09.548856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:09.548887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:09.549089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:09.673739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.674710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.674873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.674951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.675840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-10T12:17:10.437310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:10.437337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:10.437407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:10.437477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:10.437522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:10.437596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:10.437627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.437646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:10.437665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:10.440361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.440554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.444012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.444205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.444329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:10.444424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.446768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:10.446912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:10.446941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-10T12:17:10.447008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-10T12:17:10.447044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-10T12:17:10.447076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-10T12:17:10.447106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-10T12:17:10.447135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-10T12:17:10.447482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.447811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.447847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:10.447892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:17:10.447911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:10.447929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:17:10.447943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:10.447974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-10T12:17:10.448022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2755] message: TxId: 101 2026-01-10T12:17:10.448055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:10.448118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:10.448145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:10.448231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:10.448258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-10T12:17:10.448272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-10T12:17:10.448288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:17:10.448302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-10T12:17:10.448313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-10T12:17:10.448339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:17:10.450111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:10.450158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:789:2756] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:10.450475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:10.450648Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 187us result status StatusSuccess 2026-01-10T12:17:10.451030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: 
"key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:09.441377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:09.441460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.441489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:09.441526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:09.441551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:09.441572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:09.441618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.441668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:09.442442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:09.442644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:09.510749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:09.510816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:09.522350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:09.522437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:09.522550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:09.528920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:09.529102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:09.529618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:09.529781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:09.531595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.531736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:09.532611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:09.532653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.532695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:09.532725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:09.532756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:09.532963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:09.666901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.667817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.667972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.668911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.669042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.669138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 0T12:17:10.555230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.555277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:10.555744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:10.555877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:10.555931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:17:10.555967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:17:10.555997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:10.556093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:17:10.557110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 858 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:10.557182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:10.557332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 858 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:10.557437Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 858 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:17:10.558859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:10.558910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:10.559040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:10.559095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:10.559188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:10.559266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:10.559302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.559335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:10.559405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:10.560247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:17:10.561986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.562134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.562458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.562527Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:10.562627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:10.562660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:10.562699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:10.562730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:10.562765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:10.562829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:17:10.562877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:10.562918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:10.562956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:10.563078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:10.564843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:10.564891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:808:2778] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:10.565453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:10.565754Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 280us result status StatusSuccess 2026-01-10T12:17:10.566266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" 
KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:09.767002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:09.767116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.767176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:09.767225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:09.767267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:09.767296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2026-01-10T12:17:09.767377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.767464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:09.768169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:09.768379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:09.838122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:09.838171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:09.849776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:09.849905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:09.850119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:09.856436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:09.856621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:09.857166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:09.857324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:09.858983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.859138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:09.859948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:09.859989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.860032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:09.860069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:09.860123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:09.860311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:09.972574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.973980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.974696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1], version: 8 2026-01-10T12:17:10.713910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:10.714418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:10.714509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:10.714540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:10.714569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:17:10.714599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:10.714665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:17:10.715455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1324 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:10.715495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:10.715652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1324 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:10.715785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1324 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:17:10.716570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2026-01-10T12:17:10.716626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:10.716761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:10.716820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:10.716927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:10.717010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:10.717058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.717101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:10.717151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:10.720737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.721038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:10.721602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.721984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.722232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:10.722282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:10.722395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:10.722436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:10.722497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:10.722538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:10.722579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:17:10.722651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 101 2026-01-10T12:17:10.722704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:10.722742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:10.722777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:10.722916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:10.724566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:10.724644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:749:2727] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:10.725145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:10.725365Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 227us result status StatusSuccess 2026-01-10T12:17:10.725871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047984.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168047984.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047984.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148047984.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046784.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046784.000000s;Name=;Codec=}; 2026-01-10T12:16:26.134785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:26.169197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2026-01-10T12:16:26.169398Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:26.175601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:26.175812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:26.176066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:26.176192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:26.176271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:26.176360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:26.176441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:26.176507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:26.176599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:26.176676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.176741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:26.176819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:26.176913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:26.202859Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:26.203260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:26.203340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:26.203544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.203678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:26.203750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:26.203800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:26.203946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:26.204050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:26.204126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:26.204167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:26.204360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.204430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:26.204475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:26.204508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:26.204612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:26.204700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:26.204765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-10T12:16:26.204799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:26.204859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:26.204901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:26.204936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:26.204987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:26.205067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:26.205110Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:26.205335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:26.205413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:26.205455Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:26.205587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:26.205635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.205667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.205716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:26.205758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:26.205791Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-10T12:16:26.205848Z node 1 :TX_COLUM ... n=unexpected on destructor; 2026-01-10T12:17:11.584481Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:11.585240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-10T12:17:11.585437Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047422277:max} readable: {1768047422277:max} at tablet 9437184 2026-01-10T12:17:11.585535Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:11.585690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047422277:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:11.585747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047422277:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:11.586175Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047422277:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:11.587591Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047422277:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:11.588305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047422277:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-10T12:17:11.588695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 
2026-01-10T12:17:11.588861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:11.589051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:11.589229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:11.589423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:11.589524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:11.589625Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:11.589842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-10T12:17:11.590401Z node 1 
:TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":47305090,"name":"_full_task","f":47305090,"d_finished":0,"c":0,"l":47306747,"d":1657},"events":[{"name":"bootstrap","f":47305307,"d_finished":785,"c":1,"l":47306092,"d":785},{"a":47306241,"name":"ack","f":47306241,"d_finished":0,"c":0,"l":47306747,"d":506},{"a":47306229,"name":"processing","f":47306229,"d_finished":0,"c":0,"l":47306747,"d":518},{"name":"ProduceResults","f":47305801,"d_finished":495,"c":2,"l":47306481,"d":495},{"a":47306486,"name":"Finish","f":47306486,"d_finished":0,"c":0,"l":47306747,"d":261}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:11.590513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:11.590902Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47305090,"name":"_full_task","f":47305090,"d_finished":0,"c":0,"l":47307402,"d":2312},"events":[{"name":"bootstrap","f":47305307,"d_finished":785,"c":1,"l":47306092,"d":785},{"a":47306241,"name":"ack","f":47306241,"d_finished":0,"c":0,"l":47307402,"d":1161},{"a":47306229,"name":"processing","f":47306229,"d_finished":0,"c":0,"l":47307402,"d":1173},{"name":"ProduceResults","f":47305801,"d_finished":495,"c":2,"l":47306481,"d":495},{"a":47306486,"name":"Finish","f":47306486,"d_finished":0,"c":0,"l":47307402,"d":916}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-10T12:17:11.590977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:11.587570Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:11.591016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:11.591106Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:10.902765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:10.902869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:10.902922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:10.902976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:10.903012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:10.903040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:10.903109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:10.903189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-10T12:17:10.904039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:10.904391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:10.978566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:10.978637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:10.991322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:10.991419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:10.991568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:10.998632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:10.998795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:10.999281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:10.999452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:11.001175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.001321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:11.002145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:11.002190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.002231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:11.002261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:11.002290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:11.002481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.113854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.114955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.115326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
thId: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:11.655084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:17:11.655282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.655352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:11.655393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:11.655435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:11.657224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.657283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:11.657330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:11.658984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.659032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.659095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:11.659182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:11.659327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:11.661003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:11.661139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:11.661500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:11.661643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:11.661705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:11.662003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:11.662080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:11.662230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:11.662303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:11.664136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:11.664189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:11.664384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.664477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:11.664847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.664915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:11.665073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:11.665114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:11.665157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:11.665190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:11.665227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:11.665270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:11.665306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:11.665348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:11.665444Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:11.665489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:11.665532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:11.666177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:11.666288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:11.666331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:11.666371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:11.666433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:11.666523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:11.669650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:11.670163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:11.671628Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:17:11.672803Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:17:11.675963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:11.676373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.676547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } 
KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2026-01-10T12:17:11.677031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2026-01-10T12:17:11.677828Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:11.680912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:11.681151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:11.681649Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:11.125112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:11.125188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:11.125218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:11.125253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:11.125278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:11.125299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:11.125341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:11.125400Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:11.126054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:11.126251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:11.190143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:11.190199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:11.206082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:11.206216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:11.206393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:11.214720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:11.214964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:11.215703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:11.215951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:11.218445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.218623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:11.219965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:11.220029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.220122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:11.220167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:11.220211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:11.220520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.362148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.363965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.364066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.364160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 823 RawX2: 4294970086 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:12.196981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:12.197132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 823 RawX2: 4294970086 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:12.197200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:12.197322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 823 RawX2: 4294970086 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:12.197411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:12.197474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-10T12:17:12.199520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.199918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.211898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:12.211948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:12.212108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:12.212173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:12.212247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 
0 Generation: 2 2026-01-10T12:17:12.212314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:12.212363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.212421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:12.212459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:12.212527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:12.214152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.214420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.214474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-10T12:17:12.214518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-10T12:17:12.214557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-10T12:17:12.214646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-10T12:17:12.214683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-10T12:17:12.216446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.216508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:12.216627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:12.216669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:12.216719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:12.216750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:12.216785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:12.216841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:17:12.216881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:12.216915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:12.216945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:12.217062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:17:12.217109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:12.218726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:12.218772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:853:2812] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:12.219240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:12.219456Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 235us result status StatusSuccess 2026-01-10T12:17:12.219899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:13.780854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:13.780957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.781007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:13.781062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:13.781106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:13.781136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:13.781226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.781331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:13.782212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:13.782443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:13.857114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:13.857160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:13.868817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:13.868915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:13.869064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:13.876762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:13.876957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:13.877733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:13.877937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:13.879889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.880059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:13.881217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-10T12:17:13.881276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.881357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:13.881405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:13.881466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:13.881718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.002187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.003979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.004053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.004144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.004225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.004332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:17:14.004460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.004549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:14.514397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:17:14.514549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.514592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:14.514637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:14.514674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:14.515982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.516028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:14.516060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:14.517469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.517505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.517547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:14.517601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:14.517719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2026-01-10T12:17:14.518891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:14.518985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:14.519277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:14.519383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:14.519422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:14.519651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:14.519693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:14.519805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:14.519870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:14.521473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:14.521521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:14.521664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:14.521709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:14.521948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.521982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:14.522051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:14.522077Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:14.522111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:14.522144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:14.522215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:14.522248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:14.522297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:14.522330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:14.522394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:14.522431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:14.522457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:14.522850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:14.522953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:14.522986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:14.523014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:14.523048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:14.523113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:14.525370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:14.525738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:14.526775Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:17:14.527612Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become 
StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:17:14.529720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:14.529984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.530092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2026-01-10T12:17:14.530439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2026-01-10T12:17:14.530950Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:14.533779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:14.533993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:14.534508Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:13.871300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:13.871412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.871452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:13.871499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:13.871535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:13.871564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:13.871655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.871727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:13.872598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:13.872872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:13.959488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:13.959540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:13.974300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:13.974425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:13.974588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:13.982322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:13.982517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:13.983212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:13.983409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:13.985595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.985792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:13.986879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:13.986933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.986994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:13.987036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:13.987099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:13.987343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.137935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.138902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.139929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.140041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.140136Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ep5000039 State->FrontStep: 5000039 2026-01-10T12:17:14.930961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.931038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:14.931657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:14.931753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:14.931792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:17:14.931840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:17:14.931881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:14.931970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:17:14.934798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:17:14.947144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:14.947210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:14.947436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:14.947585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply 
TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:17:14.949011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:14.949068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:14.949224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:14.949316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:14.949408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:14.949498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:14.949548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.949599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:14.949649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:14.952034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.952494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.952854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:14.952919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:14.953027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 
2026-01-10T12:17:14.953064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:14.953119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:14.953162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:14.953202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:14.953270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:17:14.953320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:14.953359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:14.953399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:14.953531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:14.955347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:14.955404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:808:2778] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:14.955998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:14.956268Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 285us result status StatusSuccess 2026-01-10T12:17:14.956846Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:09.088626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:09.088736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.088783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:09.088838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:09.088884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:09.088918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:09.089006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:09.089089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:09.089986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:09.090299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:09.180475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:09.180541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:09.196434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:09.196593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:09.196778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:09.205146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:09.205384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:09.206202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:09.206447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:09.208703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.208924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:09.210134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:09.210205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:09.210299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:09.210351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-10T12:17:09.210397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:09.210690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:09.355508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:17:09.356682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.356993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:09.357045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :17:15.095159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.095832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:15.095918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:15.095946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:15.095979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:15.096020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:15.096060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:17:15.096150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3125:4346] message: TxId: 101 2026-01-10T12:17:15.096190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:15.096238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:15.096272Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:15.097184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-10T12:17:15.099781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:15.099819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:3126:4347] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:15.100299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:15.100520Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 271us result status StatusSuccess 2026-01-10T12:17:15.100993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: 
true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1768047435.101680 283733 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-10T12:17:15.104131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:15.104308Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.104684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2026-01-10T12:17:15.106613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:15.106850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047986.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168047986.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047986.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148047986.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046786.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046786.000000s;Name=;Codec=}; 2026-01-10T12:16:28.683545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:28.707861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:28.708057Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:28.713962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:28.714181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:28.714353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2026-01-10T12:16:28.714432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:28.714495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:28.714591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:28.714666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:28.714730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:28.714849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:28.714927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.714989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:28.715051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:28.715121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:28.742023Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:28.742385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:28.742446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:28.742619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.742785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:28.742871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:28.742938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:28.743056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:28.743129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:28.743174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:28.743209Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:28.743401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.743469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:28.743513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:28.743546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:28.743668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:28.743748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:28.743809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:28.743841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:28.743891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:28.743932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:28.743962Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:16:28.744031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:28.744094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:28.744123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:28.744326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:28.744378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:28.744419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:28.744571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:28.744624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.744656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.744708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:28.744755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:28.744792Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:28.744842Z node 1 :TX_COLUM ... 
son=unexpected on destructor; 2026-01-10T12:17:15.673479Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:15.674093Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-10T12:17:15.674547Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047424827:max} readable: {1768047424827:max} at tablet 9437184 2026-01-10T12:17:15.674674Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:15.674823Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047424827:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:15.674871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047424827:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:15.675296Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047424827:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:15.676738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047424827:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:15.677760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047424827:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-10T12:17:15.678163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:15.678328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:15.678543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:15.678716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:15.678937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:15.679071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:15.679169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:15.679384Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-10T12:17:15.679767Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":49045072,"name":"_full_task","f":49045072,"d_finished":0,"c":0,"l":49046827,"d":1755},"events":[{"name":"bootstrap","f":49045321,"d_finished":842,"c":1,"l":49046163,"d":842},{"a":49046316,"name":"ack","f":49046316,"d_finished":0,"c":0,"l":49046827,"d":511},{"a":49046303,"name":"processing","f":49046303,"d_finished":0,"c":0,"l":49046827,"d":524},{"name":"ProduceResults","f":49045834,"d_finished":544,"c":2,"l":49046584,"d":544},{"a":49046588,"name":"Finish","f":49046588,"d_finished":0,"c":0,"l":49046827,"d":239}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:15.679855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:15.680214Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":49045072,"name":"_full_task","f":49045072,"d_finished":0,"c":0,"l":49047288,"d":2216},"events":[{"name":"bootstrap","f":49045321,"d_finished":842,"c":1,"l":49046163,"d":842},{"a":49046316,"name":"ack","f":49046316,"d_finished":0,"c":0,"l":49047288,"d":972},{"a":49046303,"name":"processing","f":49046303,"d_finished":0,"c":0,"l":49047288,"d":985},{"name":"ProduceResults","f":49045834,"d_finished":544,"c":2,"l":49046584,"d":544},{"a":49046588,"name":"Finish","f":49046588,"d_finished":0,"c":0,"l":49047288,"d":700}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-10T12:17:15.680283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:15.676708Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:15.680319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:15.680436Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:14.803242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:14.803321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:14.803361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:14.803418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:14.803448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:14.803473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:14.803523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:14.803593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:14.804374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:14.804643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:14.872326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:14.872376Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:14.886241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:14.886384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:14.886569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:14.895869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:14.896143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:14.896931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:14.897172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:14.899639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:14.899838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:14.901087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:14.901153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:14.901230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:14.901283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:14.901331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:14.901648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.056123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.056930Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:15.057710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:17:15.921208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.921274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:15.921750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:17:15.921875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:17:15.921927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:17:15.921985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:17:15.922033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:15.922140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-10T12:17:15.925166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:17:15.937249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1082 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-10T12:17:15.937299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:15.937444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1082 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-10T12:17:15.937595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1082 } } 
CommitVersion { Step: 5000040 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:17:15.938307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:17:15.938349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:15.938451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:17:15.938540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:15.938637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:17:15.938711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:15.938743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.938774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:15.938805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-10T12:17:15.940990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.941311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.941429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:15.941482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:17:15.941594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:17:15.941683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:17:15.941718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:17:15.941745Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:17:15.941772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-10T12:17:15.941817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 104 2026-01-10T12:17:15.941871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:17:15.941912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:17:15.941938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:17:15.942035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:15.943449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:17:15.943501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:856:2826] TestWaitNotification: OK eventTxId 104 2026-01-10T12:17:15.944018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:15.944252Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 212us result status StatusSuccess 2026-01-10T12:17:15.944623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047988.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168047988.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047988.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148047988.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046788.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046788.000000s;Name=;Codec=}; 2026-01-10T12:16:30.014544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:30.051030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:30.051303Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:30.059311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:30.059584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:30.059839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:30.059966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:30.060102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:30.060254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:30.060380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:30.060487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:30.060649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:30.060780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.060888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:30.060998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:30.061151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:30.093656Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:30.094033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:30.094096Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:30.094285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:30.094433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:30.094517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:30.094552Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:30.094636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:30.094698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:30.094733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:30.094765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:30.094882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:30.094922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:30.094948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:30.094966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:30.095025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:30.095086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:30.095136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:30.095162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:30.095197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:30.095222Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:30.095240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:30.095275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:30.095311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:30.095332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:30.095490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:30.095524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:30.095563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:30.095653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:30.095681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.095711Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.095750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:30.095786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:30.095817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:30.095851Z node 1 :TX_COLUM ... 
son=unexpected on destructor; 2026-01-10T12:17:16.163563Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:16.164211Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-10T12:17:16.164624Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047426164:max} readable: {1768047426164:max} at tablet 9437184 2026-01-10T12:17:16.164748Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:16.164913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047426164:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:16.164973Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047426164:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:16.165376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047426164:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:16.166815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047426164:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:16.167611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047426164:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2026-01-10T12:17:16.168014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:16.168227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:16.168507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.168703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.168942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:16.169052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.169136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.169316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2026-01-10T12:17:16.169714Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":48344221,"name":"_full_task","f":48344221,"d_finished":0,"c":0,"l":48346040,"d":1819},"events":[{"name":"bootstrap","f":48344456,"d_finished":961,"c":1,"l":48345417,"d":961},{"a":48345601,"name":"ack","f":48345601,"d_finished":0,"c":0,"l":48346040,"d":439},{"a":48345589,"name":"processing","f":48345589,"d_finished":0,"c":0,"l":48346040,"d":451},{"name":"ProduceResults","f":48345059,"d_finished":543,"c":2,"l":48345827,"d":543},{"a":48345831,"name":"Finish","f":48345831,"d_finished":0,"c":0,"l":48346040,"d":209}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.169799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:16.170133Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":48344221,"name":"_full_task","f":48344221,"d_finished":0,"c":0,"l":48346512,"d":2291},"events":[{"name":"bootstrap","f":48344456,"d_finished":961,"c":1,"l":48345417,"d":961},{"a":48345601,"name":"ack","f":48345601,"d_finished":0,"c":0,"l":48346512,"d":911},{"a":48345589,"name":"processing","f":48345589,"d_finished":0,"c":0,"l":48346512,"d":923},{"name":"ProduceResults","f":48345059,"d_finished":543,"c":2,"l":48345827,"d":543},{"a":48345831,"name":"Finish","f":48345831,"d_finished":0,"c":0,"l":48346512,"d":681}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2026-01-10T12:17:16.170212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:16.166794Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:16.170248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:16.170338Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-10T12:16:39.701395Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:16:39.705286Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:16:39.705666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:39.730393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:39.730748Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:39.738162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:39.738451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:39.738717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:39.738878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:39.738965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:39.739035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:39.739140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:39.739274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:39.739343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:39.739412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:39.739582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:39.739726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:39.739851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:39.762147Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:16:39.766351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:39.766696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:39.766769Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:39.766990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:39.767177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:39.767267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:39.767333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:39.767491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:39.767563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:39.767623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:39.767735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:39.767939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:39.768013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:39.768102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:39.768140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:39.768254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:39.768341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:39.768401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:39.768457Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:39.768532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:39.768578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:39.768611Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:16:39.768672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:39.768714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:39.768805Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:39.769036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:39.769099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:39.769139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:39.769290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:39.769343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:39.769379Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:39.769430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:39.769481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:39.769516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:39.769577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:39.769634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
nished=1; 2026-01-10T12:17:16.262026Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:17:16.262072Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:17:16.262368Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:16.262599Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.262669Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:16.262882Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-10T12:17:16.262974Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-10T12:17:16.263250Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-10T12:17:16.263511Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.263695Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.263890Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.264266Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:16.264477Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.264661Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.264944Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:513:2516] finished for tablet 9437184 2026-01-10T12:17:16.265562Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:512:2515];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":37066149,"name":"_full_task","f":37066149,"d_finished":0,"c":0,"l":37075166,"d":9017},"events":[{"name":"bootstrap","f":37066462,"d_finished":1338,"c":1,"l":37067800,"d":1338},{"a":37074372,"name":"ack","f":37072483,"d_finished":1606,"c":1,"l":37074089,"d":2400},{"a":37074348,"name":"processing","f":37067956,"d_finished":3718,"c":3,"l":37074092,"d":4536},{"name":"ProduceResults","f":37067318,"d_finished":2752,"c":6,"l":37074834,"d":2752},{"a":37074840,"name":"Finish","f":37074840,"d_finished":0,"c":0,"l":37075166,"d":326},{"name":"task_result","f":37067981,"d_finished":2041,"c":2,"l":37072240,"d":2041}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.265670Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:16.266314Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:512:2515];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":37066149,"name":"_full_task","f":37066149,"d_finished":0,"c":0,"l":37075873,"d":9724},"events":[{"name":"bootstrap","f":37066462,"d_finished":1338,"c":1,"l":37067800,"d":1338},{"a":37074372,"name":"ack","f":37072483,"d_finished":1606,"c":1,"l":37074089,"d":3107},{"a":37074348,"name":"processing","f":37067956,"d_finished":3718,"c":3,"l":37074092,"d":5243},{"name":"ProduceResults","f":37067318,"d_finished":2752,"c":6,"l":37074834,"d":2752},{"a":37074840,"name":"Finish","f":37074840,"d_finished":0,"c":0,"l":37075873,"d":1033},{"name":"task_result","f":37067981,"d_finished":2041,"c":2,"l":37072240,"d":2041}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:16.266454Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:16.254310Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-10T12:17:16.266513Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:16.266716Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047997.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047997.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046797.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-10T12:16:38.425448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:38.456385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:38.456619Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:38.463473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:38.463714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:38.463933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:38.464052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:38.464197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:38.464315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:38.464416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:38.464519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:38.464658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:38.464769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.464880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:38.464979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:38.465084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:38.494528Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:38.494870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:38.494924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:38.495117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.495300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:38.495407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:38.495453Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:38.495564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:38.495631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:38.495668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:38.495689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:38.495842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:38.495896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:38.495924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:38.495943Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:38.496006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:38.496067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:38.496131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:38.496153Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:38.496184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:38.496207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:38.496230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:38.496260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:38.496282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:38.496299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:38.496470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:38.496506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:38.496545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:38.496672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:38.496712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.496738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:38.496787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:38.496820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:38.496839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:38.496872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
ranule/portions;fline=constructor_portion.cpp:44;memory_size=286;data_size=260;sum=5688;count=20;size_of_portion=192; 2026-01-10T12:17:16.788553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=3153; 2026-01-10T12:17:16.788646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-10T12:17:16.789309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=612; 2026-01-10T12:17:16.789363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=4130; 2026-01-10T12:17:16.789403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=4243; 2026-01-10T12:17:16.789458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-10T12:17:16.789519Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=21; 2026-01-10T12:17:16.789557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4860; 2026-01-10T12:17:16.789691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=83; 2026-01-10T12:17:16.789814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2026-01-10T12:17:16.789960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=88; 2026-01-10T12:17:16.790068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=65; 2026-01-10T12:17:16.792623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2479; 2026-01-10T12:17:16.795080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2378; 2026-01-10T12:17:16.795181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:17:16.795227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-10T12:17:16.795268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:17:16.795334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2026-01-10T12:17:16.795387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:17:16.795467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2026-01-10T12:17:16.795532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=22; 2026-01-10T12:17:16.795606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-01-10T12:17:16.795703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=43; 2026-01-10T12:17:16.795952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=207; 2026-01-10T12:17:16.795992Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17996; 2026-01-10T12:17:16.796144Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:16.796245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:16.796299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:16.796357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:16.808010Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:16.808213Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:16.808311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:17:16.808383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:16.808431Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:16.808469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:16.808570Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:16.808810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.106000s; 2026-01-10T12:17:16.810102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:16.811067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:16.811136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:16.811216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:17:16.811278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:16.811320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:16.811366Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:16.811457Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:16.811694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.101000s; 2026-01-10T12:17:16.811730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:16:59.298659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:16:59.298771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:16:59.298810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:16:59.298870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:16:59.298905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:16:59.298935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:16:59.298998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:16:59.299068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:16:59.299878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:16:59.300163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:16:59.387301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:16:59.387364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:59.402237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:16:59.402350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:16:59.402515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:16:59.410600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:16:59.410807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:16:59.411509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:16:59.411743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:16:59.414017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:59.414192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:16:59.415324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:16:59.415397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:16:59.415464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:16:59.415508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:16:59.415553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:16:59.415806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:16:59.557685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.558666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.558840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.558927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:16:59.559791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
sole configs 2026-01-10T12:17:04.442992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:05.849676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0354 2026-01-10T12:17:05.860499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0633 2026-01-10T12:17:05.891359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-10T12:17:05.891578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:17:05.891665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:05.891784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:17:05.891838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-10T12:17:05.891868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:05.891925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-10T12:17:05.902357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:17:08.499788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0097 2026-01-10T12:17:08.510362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0166 2026-01-10T12:17:08.541061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-10T12:17:08.541168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:17:08.541203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: 
TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:08.541276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:17:08.541313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-10T12:17:08.541336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:08.541374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-10T12:17:08.551730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:17:11.123481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0097 2026-01-10T12:17:11.134144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0166 2026-01-10T12:17:11.165149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-10T12:17:11.165393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:17:11.165464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:11.165573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:17:11.165625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-10T12:17:11.165658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:11.165695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: 
SplitByLoadNotEnabledForTable 2026-01-10T12:17:11.176161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:17:14.207049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0037 2026-01-10T12:17:14.217571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0054 2026-01-10T12:17:14.258584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-10T12:17:14.258725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:17:14.258791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:14.258929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:17:14.258982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-10T12:17:14.259024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:14.259071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-10T12:17:14.269448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:17:17.693333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-10T12:17:17.693450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:17.693619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:17.693785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409547, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 40 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 
BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-10T12:17:17.694208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:17.694808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:17.694852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:17.697290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:17.697478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:17.697574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:17.697638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system >> TSchemeShardTTLTestsWithReboots::CreateTable >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTestsWithReboots::CopyTable |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:31.880256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:31.880363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:31.880417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:31.880461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:31.880499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:31.880544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:31.880646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:31.880751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:31.881668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:31.881992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:32.033024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:32.033103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:32.034096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:32.047472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:32.047574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:32.047748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:32.055457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:32.055791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:32.056593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.057096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:32.060566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:32.060800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:32.062053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:32.062116Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:32.062234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:32.062269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:32.062312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:32.062558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.070553Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:32.239981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:32.240563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.240859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:32.240928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:32.241170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:32.241232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:32.244597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.244867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:32.245111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.245207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:32.245259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:32.245296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:32.248828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.248919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:32.248964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:32.252596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.252656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.252716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:32.252791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:32.256841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:32.258892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:32.259083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:32.260283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.260437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:32.260486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:32.260815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:32.260873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-10T12:14:32.261051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:32.261180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:32.264485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 3, at schemeshard: 72057594046678944, txId: 253 2026-01-10T12:17:16.462750Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 180], version: 5 2026-01-10T12:17:16.462791Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 180] was 2 2026-01-10T12:17:16.464529Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-10T12:17:16.464653Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-10T12:17:16.464695Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 253 2026-01-10T12:17:16.464734Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 181], version: 2 2026-01-10T12:17:16.464777Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 181] was 2 2026-01-10T12:17:16.465856Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-10T12:17:16.465959Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-10T12:17:16.465996Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 253 2026-01-10T12:17:16.466034Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 178], version: 2 2026-01-10T12:17:16.466070Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 178] was 2 2026-01-10T12:17:16.466151Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: 
Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 253, subscribers: 0 2026-01-10T12:17:16.469712Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-10T12:17:16.469898Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-10T12:17:16.469988Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-10T12:17:16.472312Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-10T12:17:16.472447Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 TestModificationResult got TxId: 253, wait until txId: 253 TestWaitNotification wait txId: 253 2026-01-10T12:17:16.474754Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 253: send EvNotifyTxCompletion 2026-01-10T12:17:16.474809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 253 2026-01-10T12:17:16.477180Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 253, at schemeshard: 72057594046678944 2026-01-10T12:17:16.477312Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 253: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.477353Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 253: satisfy waiter [32:4249:6236] TestWaitNotification: OK eventTxId 253 TestWaitNotification wait txId: 245 2026-01-10T12:17:16.479247Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 245: send EvNotifyTxCompletion 2026-01-10T12:17:16.479296Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 245 TestWaitNotification wait txId: 246 2026-01-10T12:17:16.479384Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 246: send EvNotifyTxCompletion 2026-01-10T12:17:16.479414Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 246 TestWaitNotification wait txId: 247 2026-01-10T12:17:16.479485Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 247: send EvNotifyTxCompletion 2026-01-10T12:17:16.479515Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 247 TestWaitNotification wait txId: 248 2026-01-10T12:17:16.479577Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 248: send EvNotifyTxCompletion 2026-01-10T12:17:16.479607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 248 TestWaitNotification wait txId: 249 2026-01-10T12:17:16.479672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 249: send 
EvNotifyTxCompletion 2026-01-10T12:17:16.479701Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 249 TestWaitNotification wait txId: 250 2026-01-10T12:17:16.479763Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 250: send EvNotifyTxCompletion 2026-01-10T12:17:16.479793Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 250 TestWaitNotification wait txId: 251 2026-01-10T12:17:16.479857Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 251: send EvNotifyTxCompletion 2026-01-10T12:17:16.479886Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 251 TestWaitNotification wait txId: 252 2026-01-10T12:17:16.479951Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 252: send EvNotifyTxCompletion 2026-01-10T12:17:16.479986Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 252 2026-01-10T12:17:16.482846Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 245, at schemeshard: 72057594046678944 2026-01-10T12:17:16.483168Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 245: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.483216Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 245: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.483599Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 246, at schemeshard: 72057594046678944 2026-01-10T12:17:16.483692Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 247, at schemeshard: 72057594046678944 2026-01-10T12:17:16.483884Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 246: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.483911Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 246: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.484070Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 248, at schemeshard: 72057594046678944 2026-01-10T12:17:16.484149Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 247: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.484175Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 247: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.484301Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 249, at schemeshard: 72057594046678944 2026-01-10T12:17:16.484384Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 250, at schemeshard: 72057594046678944 2026-01-10T12:17:16.484447Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 248: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.484470Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 248: satisfy waiter [32:4252:6239] 
2026-01-10T12:17:16.484555Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 251, at schemeshard: 72057594046678944 2026-01-10T12:17:16.484664Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 249: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.484689Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 249: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.484792Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 252, at schemeshard: 72057594046678944 2026-01-10T12:17:16.484832Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 250: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.484854Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 250: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.484956Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 251: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.484981Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 251: satisfy waiter [32:4252:6239] 2026-01-10T12:17:16.485087Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 252: got EvNotifyTxCompletionResult 2026-01-10T12:17:16.485121Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 252: satisfy waiter [32:4252:6239] TestWaitNotification: OK eventTxId 245 TestWaitNotification: OK eventTxId 246 TestWaitNotification: OK eventTxId 247 TestWaitNotification: OK eventTxId 248 TestWaitNotification: OK eventTxId 249 TestWaitNotification: OK eventTxId 250 TestWaitNotification: OK eventTxId 251 TestWaitNotification: OK eventTxId 252 >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:18.571866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:18.571969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:18.572011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:18.572052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:18.572115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:18.572148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:18.572214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:18.572283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:18.573111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:18.573364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:18.660476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:18.660517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:18.675511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:18.675616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:18.675799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:18.683623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:18.683798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:18.684361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:18.684579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:18.686500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.686634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:18.687457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:18.687499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.687559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:18.687589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:18.687632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:18.687882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:18.810200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.811968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.812028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.812113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.812213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.812283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.812331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... FO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:17:19.796738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:17:19.796781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:17:19.796818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-10T12:17:19.796872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:17:19.797762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:17:19.797842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:17:19.797871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:17:19.797898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:17:19.797926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-10T12:17:19.798013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:17:19.799479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 300 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1176 } } CommitVersion { Step: 300 TxId: 103 } 
2026-01-10T12:17:19.799519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-10T12:17:19.799650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 300 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1176 } } CommitVersion { Step: 300 TxId: 103 } 2026-01-10T12:17:19.799778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 300 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1176 } } CommitVersion { Step: 300 TxId: 103 } debug: NTableState::TProposedWaitParts operationId# 103:0 2026-01-10T12:17:19.801075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 964 RawX2: 4294970204 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:17:19.801120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-10T12:17:19.801245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 964 RawX2: 4294970204 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:17:19.801303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:19.801415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 964 RawX2: 4294970204 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-10T12:17:19.801491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.801550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.801589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:17:19.801623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-10T12:17:19.805000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:17:19.805110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:17:19.805271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.805389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.805497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.805546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:17:19.805637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:17:19.805678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:19.805719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:17:19.805747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:19.805779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:17:19.805845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:824:2791] message: TxId: 103 2026-01-10T12:17:19.805889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:19.805933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:17:19.805964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:17:19.806063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:17:19.807639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:17:19.807686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:992:2928] TestWaitNotification: OK eventTxId 103 W0000 00:00:1768047439.808257 286053 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2026-01-10T12:17:19.811291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: 
"modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:19.811657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.811781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-10T12:17:19.812180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2026-01-10T12:17:19.814076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:19.814293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:00.833879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:00.833963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:00.834002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:00.834047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:00.834077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:00.834102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:00.834174Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:00.834238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:00.835010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:00.835339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:00.911344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:00.911410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:00.925578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:00.925710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:00.925842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:00.935038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:00.935329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:00.936137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:00.936407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:00.939224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:00.939389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:00.940560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:00.940623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:00.940692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:00.940744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:00.940789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:00.941046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:01.048408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:01.049851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 2026-01-10T12:17:19.686950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.687077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:19.687167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-10T12:17:19.687438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-10T12:17:19.687627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-10T12:17:19.693457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:19.693504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:17:19.693730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.693768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:17:19.694064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.694108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:19.694670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:19.694760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:19.694811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:17:19.694886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:17:19.694940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:19.695024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:17:19.696676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1268 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:19.696725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:19.696841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1268 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:19.696936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1268 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-10T12:17:19.697808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:19.697846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:19.697969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:19.698017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:19.698087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:19.698150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.698185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.698219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:19.698256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:19.699587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:17:19.701023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.701757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.701984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.702044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:19.702135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:19.702191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:19.702224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:19.702251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:19.702279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:19.702326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 102 2026-01-10T12:17:19.702364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:19.702393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:19.702429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:19.702563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:19.703989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:19.704027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1024:2977] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:19.704372Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:19.704422Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__conditional_erase.cpp:398: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2026-01-10T12:17:19.705724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:19.705835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:19.705880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.055500Z, at schemeshard: 72057594046678944 2026-01-10T12:17:19.705937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TColumnShardTestSchema::HotTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:19.182315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:19.182385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.182420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:19.182452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:19.182478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:19.182500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:19.182547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:17:19.182622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:19.183257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:19.183447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:19.246464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:19.246506Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:19.257542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:19.257661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:19.257791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:19.263871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:19.264026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:19.264542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.264759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:19.266809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.266985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:19.268049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:19.268120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.268185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:19.268228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:19.268276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:19.268555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.373507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.374913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:17:20.129584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:17:20.130415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.130470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:20.131973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:20.132110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:20.132155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:20.132214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:17:20.132262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:20.132794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:20.132850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:20.132867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:20.132886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:17:20.132904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:20.132956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:17:20.133775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 
ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1296 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:20.133803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:20.133944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1296 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:20.134073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1296 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:17:20.134856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:20.134910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:20.135079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:20.135137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:20.135259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:20.135328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:20.135379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.135445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:20.135494Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:20.139749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:20.140027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:20.140567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.140912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.141219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.141263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:20.141375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:20.141408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:20.141461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:20.141509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:20.141547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:17:20.141601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 101 2026-01-10T12:17:20.141636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:20.141665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:20.141702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:20.141824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:20.143461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:20.143527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:749:2727] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:17:20.146211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-01-10T12:17:20.146472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:20.146819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2026-01-10T12:17:20.148564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:20.148752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047979.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047979.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047979.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047979.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046779.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047979.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148047979.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046779.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046779.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046779.000000s;Name=;Codec=}; 2026-01-10T12:16:19.730938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:19.764210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:19.764456Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:19.771798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:19.772067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:19.772324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:19.772450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:19.772565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:19.772709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:19.772815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:19.772933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:19.773055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:19.773168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:19.773268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:19.773372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:19.773486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:19.804648Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:19.804983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:19.805058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:19.805196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:19.805334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:19.805411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:19.805457Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:19.805556Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:19.805606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:19.805638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:19.805663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:19.805785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:19.805833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:19.805866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:19.805899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:19.805978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:19.806027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:19.806063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:19.806085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:19.806135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:19.806171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:19.806196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:19.806229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:19.806257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:19.806279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:19.806422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:19.806457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:19.806480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:19.806576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:19.806610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:19.806631Z node 1 :TX_ ... 
round=cleanup;skip_reason=no_changes; 2026-01-10T12:17:20.064045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:20.064315Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047428047:max} readable: {1768047428047:max} at tablet 9437184 2026-01-10T12:17:20.064458Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:20.064642Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047428047:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.064728Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047428047:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.065286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047428047:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:20.067159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047428047:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:20.068251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047428047:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2026-01-10T12:17:20.068843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:20.069082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:20.069333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.069522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.069946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:20.070099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.070230Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.070492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2026-01-10T12:17:20.071048Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":63821757,"name":"_full_task","f":63821757,"d_finished":0,"c":0,"l":63824150,"d":2393},"events":[{"name":"bootstrap","f":63822110,"d_finished":1052,"c":1,"l":63823162,"d":1052},{"a":63823496,"name":"ack","f":63823496,"d_finished":0,"c":0,"l":63824150,"d":654},{"a":63823475,"name":"processing","f":63823475,"d_finished":0,"c":0,"l":63824150,"d":675},{"name":"ProduceResults","f":63822792,"d_finished":654,"c":2,"l":63823847,"d":654},{"a":63823853,"name":"Finish","f":63823853,"d_finished":0,"c":0,"l":63824150,"d":297}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.071144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:20.071689Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":63821757,"name":"_full_task","f":63821757,"d_finished":0,"c":0,"l":63824775,"d":3018},"events":[{"name":"bootstrap","f":63822110,"d_finished":1052,"c":1,"l":63823162,"d":1052},{"a":63823496,"name":"ack","f":63823496,"d_finished":0,"c":0,"l":63824775,"d":1279},{"a":63823475,"name":"processing","f":63823475,"d_finished":0,"c":0,"l":63824775,"d":1300},{"name":"ProduceResults","f":63822792,"d_finished":654,"c":2,"l":63823847,"d":654},{"a":63823853,"name":"Finish","f":63823853,"d_finished":0,"c":0,"l":63824775,"d":922}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2026-01-10T12:17:20.071813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:20.067126Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:20.071860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:20.072007Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTTLTests::TtlTiersValidation >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TSchemeShardTTLTests::CheckCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168048003.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168048003.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168048003.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168048003.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168048003.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148048003.000000s;Name=;Codec=}; 
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=168048003.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168048003.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046803.000000s;Name=;Codec=}; 2026-01-10T12:16:43.637907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:43.667295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:43.667538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:43.674234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:43.674473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:43.674703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:43.674820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:43.674921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:43.675049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:43.675153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:43.675254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:43.675389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:43.675499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:43.675597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:43.675695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:43.675810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:43.704602Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:43.704958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:43.705017Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:43.705198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:43.705370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:43.705460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:43.705504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:43.705595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:43.705668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:43.705717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:43.705748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:43.705901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:43.705960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:43.705997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:43.706023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:43.706108Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:43.706182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:43.706233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:43.706261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:43.706310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:43.706347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:43.706378Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:43.706421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:43.706456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:43.706486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:43.706658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:43.706703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:43.706731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:43.706839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:43.706880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:43.706906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:43.706950Z node 1 :TX ... 
mpl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:20.753070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:20.753323Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047439959:max} readable: {1768047439959:max} at tablet 9437184 2026-01-10T12:17:20.753468Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:20.753696Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047439959:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.753774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047439959:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.754327Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047439959:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:20.756208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047439959:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:20.757290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047439959:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1327:3296];trace_detailed=; 2026-01-10T12:17:20.757851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:20.758093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:20.758391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.758583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.759009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:20.759162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.759317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.759580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1327:3296] finished for tablet 9437184 2026-01-10T12:17:20.760116Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1326:3295];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":39871445,"name":"_full_task","f":39871445,"d_finished":0,"c":0,"l":39873892,"d":2447},"events":[{"name":"bootstrap","f":39871768,"d_finished":1117,"c":1,"l":39872885,"d":1117},{"a":39873225,"name":"ack","f":39873225,"d_finished":0,"c":0,"l":39873892,"d":667},{"a":39873203,"name":"processing","f":39873203,"d_finished":0,"c":0,"l":39873892,"d":689},{"name":"ProduceResults","f":39872489,"d_finished":691,"c":2,"l":39873593,"d":691},{"a":39873605,"name":"Finish","f":39873605,"d_finished":0,"c":0,"l":39873892,"d":287}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.760221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1326:3295];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:20.760765Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1326:3295];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":39871445,"name":"_full_task","f":39871445,"d_finished":0,"c":0,"l":39874523,"d":3078},"events":[{"name":"bootstrap","f":39871768,"d_finished":1117,"c":1,"l":39872885,"d":1117},{"a":39873225,"name":"ack","f":39873225,"d_finished":0,"c":0,"l":39874523,"d":1298},{"a":39873203,"name":"processing","f":39873203,"d_finished":0,"c":0,"l":39874523,"d":1320},{"name":"ProduceResults","f":39872489,"d_finished":691,"c":2,"l":39873593,"d":691},{"a":39873605,"name":"Finish","f":39873605,"d_finished":0,"c":0,"l":39874523,"d":918}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1327:3296]->[1:1326:3295] 2026-01-10T12:17:20.760881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:20.756168Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:20.760934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:20.761083Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047981.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047981.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047981.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047981.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047981.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047981.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046781.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047981.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148047981.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046781.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046781.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046781.000000s;Name=;Codec=}; 2026-01-10T12:16:21.351416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:21.384356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:21.384643Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:21.392172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:21.392469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:21.392725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:21.392876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:21.392993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:21.393153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:21.393280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:21.393411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:21.393550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:21.393654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:21.393766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:21.393876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:21.393991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:21.425678Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:21.426080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:21.426162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:21.426355Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:21.426519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:21.426610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:21.426666Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:21.426790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:21.426881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:21.426935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:21.426972Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:21.427149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:21.427232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:21.427277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:16:21.427311Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:21.427408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:21.427495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:21.427579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:21.427616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:21.427677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:21.427717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:21.427749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:21.427799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:21.427843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:21.427874Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:21.428124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:21.428212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:21.428254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:21.428389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:21.428435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:21.428 ... 
hard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:20.716516Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:20.716752Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047429666:max} readable: {1768047429666:max} at tablet 9437184 2026-01-10T12:17:20.716903Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:20.717100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047429666:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.717175Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047429666:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:20.717764Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047429666:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:20.719534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047429666:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:20.720624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047429666:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2026-01-10T12:17:20.721142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:20.721383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:20.721636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.721812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.722187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:20.722335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.722467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.722691Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2026-01-10T12:17:20.723227Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":62587008,"name":"_full_task","f":62587008,"d_finished":0,"c":0,"l":62589247,"d":2239},"events":[{"name":"bootstrap","f":62587315,"d_finished":1009,"c":1,"l":62588324,"d":1009},{"a":62588635,"name":"ack","f":62588635,"d_finished":0,"c":0,"l":62589247,"d":612},{"a":62588615,"name":"processing","f":62588615,"d_finished":0,"c":0,"l":62589247,"d":632},{"name":"ProduceResults","f":62587987,"d_finished":604,"c":2,"l":62588963,"d":604},{"a":62588967,"name":"Finish","f":62588967,"d_finished":0,"c":0,"l":62589247,"d":280}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:20.723316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:20.723704Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":62587008,"name":"_full_task","f":62587008,"d_finished":0,"c":0,"l":62589840,"d":2832},"events":[{"name":"bootstrap","f":62587315,"d_finished":1009,"c":1,"l":62588324,"d":1009},{"a":62588635,"name":"ack","f":62588635,"d_finished":0,"c":0,"l":62589840,"d":1205},{"a":62588615,"name":"processing","f":62588615,"d_finished":0,"c":0,"l":62589840,"d":1225},{"name":"ProduceResults","f":62587987,"d_finished":604,"c":2,"l":62588963,"d":604},{"a":62588967,"name":"Finish","f":62588967,"d_finished":0,"c":0,"l":62589840,"d":873}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2026-01-10T12:17:20.723780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:20.719503Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:20.723811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:20.723920Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047992.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168047992.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047992.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148047992.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046792.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046792.000000s;Name=;Codec=}; 
2026-01-10T12:16:34.275174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:34.295144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:34.295447Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:34.301426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:34.301630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:34.301804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:34.301887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:34.301978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:34.302068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:34.302134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:34.302202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:34.302307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:34.302383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.302472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:34.302537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:34.302670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:34.323071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:34.323332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:34.323396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:34.323533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.323672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:34.323749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:34.323784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:34.323870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:34.323923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:34.323952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:34.323977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:34.324115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:34.324163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:34.324204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:34.324229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:34.324303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:34.324354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:34.324393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:34.324418Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:34.324462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:34.324488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:34.324519Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:34.324554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:34.324581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:34.324600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:34.324760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:34.324801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:34.324834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:34.324920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:34.324948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.324967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:34.325002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:34.325031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:34.325051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:34.325094Z node 1 :TX_COLUM ... ard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2026-01-10T12:17:21.725686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2026-01-10T12:17:21.725713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2157; 2026-01-10T12:17:21.725743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2257; 2026-01-10T12:17:21.725782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2026-01-10T12:17:21.725841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=22; 2026-01-10T12:17:21.725870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2694; 2026-01-10T12:17:21.725966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=54; 2026-01-10T12:17:21.726072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2026-01-10T12:17:21.726190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=78; 2026-01-10T12:17:21.726258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=43; 2026-01-10T12:17:21.727432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1131; 2026-01-10T12:17:21.728944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1444; 2026-01-10T12:17:21.729003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2026-01-10T12:17:21.729049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:17:21.729077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:17:21.729126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=28; 2026-01-10T12:17:21.729150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-10T12:17:21.729204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2026-01-10T12:17:21.729227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-10T12:17:21.729283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2026-01-10T12:17:21.729341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=35; 2026-01-10T12:17:21.729500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=136; 2026-01-10T12:17:21.729528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12898; 2026-01-10T12:17:21.729630Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:21.729706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:21.729745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:21.729800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:21.738418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:21.738561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:21.738637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:21.738694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047131805;tx_id=18446744073709551615;;current_snapshot_ts=1768047419692; 2026-01-10T12:17:21.738730Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:21.738766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:21.738802Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:21.738866Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:21.739042Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.123000s; 2026-01-10T12:17:21.740992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:21.741101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:21.741149Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:21.741280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:21.741347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047131805;tx_id=18446744073709551615;;current_snapshot_ts=1768047419692; 2026-01-10T12:17:21.741393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:21.741446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:21.741505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:21.741595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:21.742273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.158000s; 2026-01-10T12:17:21.742328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:14.673306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:14.673387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:14.673420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:14.673464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:14.673498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:14.673540Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:14.673600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:14.673663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:14.674357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:14.674593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:14.736944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:14.736995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:14.748401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:14.748502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:14.748657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:14.756291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:14.756471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:14.757099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:14.757292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:14.759129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:14.759289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:14.760278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:14.760328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:14.760378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:14.760425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:14.760465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:14.760723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2026-01-10T12:17:14.877322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.878971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:14.879643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
72057594046678944 2026-01-10T12:17:21.142516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.142993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.143037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:17:21.143165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:17:21.143200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:21.143240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:17:21.143275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:21.143312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-10T12:17:21.143382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3114:4334] message: TxId: 103 2026-01-10T12:17:21.143436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:17:21.143490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:17:21.143527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:17:21.145043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-10T12:17:21.150911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:17:21.150971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4366:5525] TestWaitNotification: OK eventTxId 103 2026-01-10T12:17:21.151593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:21.151859Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 290us result status StatusSuccess 2026-01-10T12:17:21.152547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 
72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-10T12:17:21.155991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:21.156224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.156683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2026-01-10T12:17:21.159031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:21.159240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update 
error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:17:21.159571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:17:21.159614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:17:21.160044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:17:21.160154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:17:21.160198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4708:5866] TestWaitNotification: OK eventTxId 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:20.984368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:20.984483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:20.984521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:20.984560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:20.984592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:20.984617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:20.984694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:20.984788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:20.985665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:20.985901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:21.059697Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:21.059759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:21.075412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:21.075530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:21.075706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:21.084643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:21.084861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:21.085444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:21.085655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:21.087918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.088066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:21.088992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:21.089044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.089093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:21.089131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:21.089165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:21.089377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.237903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.238942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239077Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.239936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.240046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.240145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:17:22.127139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:22.127283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:17:22.127345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:17:22.127398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-10T12:17:22.127444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:17:22.127571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:17:22.131552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:17:22.154730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1311 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:22.154798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:22.154994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1311 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:17:22.155132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1311 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:17:22.156187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:22.156249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:22.156408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:22.156457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:22.156547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:17:22.156615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:22.156646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.156673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:22.156725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:17:22.159402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.159843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.160001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.160050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:17:22.160180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:22.160216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:22.160254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:17:22.160289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:22.160348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:17:22.160434Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2755] message: TxId: 102 2026-01-10T12:17:22.160481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:17:22.160518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:17:22.160573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:17:22.160705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:22.162615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:22.162684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:921:2844] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:22.163228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:22.163582Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 310us result status StatusSuccess 2026-01-10T12:17:22.164187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:17.951121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:17.951221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:17.951283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:17.951352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:17.951393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:17.951423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:17.951535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:17.951647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:17:17.952564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:17.952894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:18.027010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:18.027072Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:18.041614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:18.041712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:18.041843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:18.048441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:18.048664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:18.049412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:18.049648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:18.052045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.052265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:18.053481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:18.053544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.053613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:18.053661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:18.053722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:18.054040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:18.196017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.197925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.198033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.198139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.198212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
0Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409551, request: TableId: 43 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640223000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-10T12:17:22.034345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-10T12:17:22.035248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:22.035660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-10T12:17:22.035966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:22.036312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:17:22.037037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:17:22.037428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.037487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:22.039288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.039337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:17:22.040034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.040073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:17:22.043103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.043264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.043305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.223000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.045816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.045858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-10T12:17:22.046668Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.046708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:22.047264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.047369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.047400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.224000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.047459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.047515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.047592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.047620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.224000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.049179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.049366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.049534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.049582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.118403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2026-01-10T12:17:22.118778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-10T12:17:22.118869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:22.118961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-10T12:17:22.119023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:17:22.119055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:22.119101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-10T12:17:22.119217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row count 1 2026-01-10T12:17:22.119265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2026-01-10T12:17:22.119351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2026-01-10T12:17:22.119403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2026-01-10T12:17:22.119467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2026-01-10T12:17:22.119541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-10T12:17:22.119579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2026-01-10T12:17:22.119617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], pathId map=TTLEnabledTable5, 
is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2026-01-10T12:17:22.119673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2026-01-10T12:17:22.132387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.132491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-10T12:17:22.134858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:22.135039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:22.135092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.228000Z, at schemeshard: 72057594046678944 2026-01-10T12:17:22.135163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:20.935932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:20.936038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:20.936105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:20.936153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:20.936198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:20.936228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:20.936305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:20.936402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:17:20.937301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:20.937617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:21.023752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:21.023816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:21.040448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:21.040586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:21.040776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:21.050505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:21.050678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:21.051347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:21.051553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:21.053671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.053818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:21.054809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:21.054857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.054903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:21.054937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:21.054975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:21.055229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:21.187405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:21.188988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
CHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:22.354110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:22.354141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-10T12:17:22.354185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-10T12:17:22.355918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.355974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-10T12:17:22.356043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:17:22.356071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:22.356139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-10T12:17:22.356178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:22.356213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:17:22.356259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2153] message: TxId: 281474976710760 2026-01-10T12:17:22.356298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:17:22.356329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:17:22.356354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:17:22.356423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:17:22.358090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:17:22.358145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:17:22.358207Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-10T12:17:22.358317Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:797:2767], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:17:22.359861Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-10T12:17:22.359974Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:797:2767], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:17:22.360024Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:17:22.361487Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-10T12:17:22.361613Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:797:2767], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:17:22.361663Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-10T12:17:22.361793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:17:22.361834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:888:2847] TestWaitNotification: OK eventTxId 102 2026-01-10T12:17:22.362563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:22.362807Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 504us result status StatusSuccess 2026-01-10T12:17:22.363314Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> Cdc::InitialScan [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:21.834037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:21.834132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:21.834174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:21.834219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:21.834253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:21.834280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:21.834368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:21.834443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:21.835299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:21.835558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:21.909142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:21.909193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:21.923961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:21.924070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:21.924266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:21.931886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:21.932109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:21.932778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:21.932978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:21.935040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.935238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:21.936360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:21.936418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:21.936481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:21.936524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:21.936572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:21.936833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.097301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.098919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.099523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
PathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:22.965924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:22.966000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:22.966027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:22.966059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:17:22.966089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:22.966160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:17:22.967044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1122 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:22.967087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:22.967266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1122 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:22.967399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1122 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:17:22.968442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:22.968496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:22.968640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:22.968704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:22.968832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:22.968910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:22.969024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.969067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:22.969103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:22.972842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:22.973177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:22.973821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.974320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.974681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.974734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:22.974844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:22.974883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:22.974943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:22.974995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:22.975037Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:17:22.975113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 101 2026-01-10T12:17:22.975171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:22.975216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:22.975253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:22.975421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:22.977175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:22.977217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:749:2727] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:17:22.980274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:22.980547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.980876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2026-01-10T12:17:22.982748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:22.982958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-01-10T12:17:22.985656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
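[Editor's note] The StatusInvalidParameter rejections for txId 102 (above) and txId 103 (below) exercise the schemeshard's TTL-tier validation for row-oriented tables. The sketch below is a minimal regrouping, in the same protobuf text format the log already uses, of the TTLSettings shapes that appear verbatim in the logged Transaction messages and in the later describe result; it is an illustration reconstructed from this log, not an additional test artifact.

  # Rejected in txId 102: tier 0 carries a Delete action but is not the last tier
  # ("Tier 0: only the last tier in TTL settings can have Delete action")
  TTLSettings {
    Enabled {
      ColumnName: "modified_at"
      Tiers { ApplyAfterSeconds: 3600 Delete { } }
      Tiers { ApplyAfterSeconds: 7200 Delete { } }
    }
  }

  # Rejected in txId 103: an EvictToExternalStorage tier on a row-oriented table
  # ("Only DELETE via TTL is allowed for row-oriented tables")
  TTLSettings {
    Enabled {
      ColumnName: "modified_at"
      Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } }
      Tiers { ApplyAfterSeconds: 7200 Delete { } }
    }
  }

  # Accepted shape, as returned later in this log by the describe of /MyRoot/TTLEnabledTable:
  # a single trailing Delete tier
  TTLSettings {
    Enabled {
      ColumnName: "modified_at"
      ExpireAfterSeconds: 3600
      Tiers { ApplyAfterSeconds: 3600 Delete { } }
    }
  }
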
2026-01-10T12:17:22.985842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.986133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2026-01-10T12:17:22.988106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:22.988283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin >> KqpQueryPerf::Delete+QueryService-UseSink |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system >> KqpWorkload::KV >> KqpQueryPerf::ComputeLength+QueryService >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::IndexUpsert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:23.133044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:23.133135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.133181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:23.133229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:23.133263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:23.133291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:23.133355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.133427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:23.134231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:23.134510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:23.214499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:23.214538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:23.225451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:23.225529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:23.225673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:23.233095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:23.233415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:23.234286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:23.234558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:23.237446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:23.237660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:23.238960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:23.239031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:23.239110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-10T12:17:23.239162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:23.239213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:23.239550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:23.388184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.389979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.390038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ode 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-10T12:17:24.342186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:24.342233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-10T12:17:24.342343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:24.342435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:24.342516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:24.342571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:24.342609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.342651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:17:24.342683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:24.344556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:24.344667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:24.346306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:24.346441Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:24.346562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:24.346629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.347839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:24.348161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-10T12:17:24.348220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-10T12:17:24.348330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-10T12:17:24.348365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-10T12:17:24.348435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-10T12:17:24.348475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-10T12:17:24.348534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-10T12:17:24.348752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.349071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.349109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:24.349155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:17:24.349173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:24.349199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:17:24.349216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:24.349235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-10T12:17:24.349282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2755] message: TxId: 101 2026-01-10T12:17:24.349320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:17:24.349354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all 
the parts is done, operation id: 101:0 2026-01-10T12:17:24.349380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:24.349494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:24.349529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-10T12:17:24.349551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-10T12:17:24.349576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:17:24.349593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-10T12:17:24.349613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-10T12:17:24.349647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:17:24.351240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.351279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:789:2756] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:24.351682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:24.351910Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 226us result status StatusSuccess 2026-01-10T12:17:24.352343Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { 
ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:23.554158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:23.554263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.554314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:23.554361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:23.554399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:23.554430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:23.554495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.554564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:23.555441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:23.555736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:23.650713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:23.650764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:23.666697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:23.666807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:23.666981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:23.675952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:23.676202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:23.676931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:23.677144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:23.681360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:23.681559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:23.682740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:23.682795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:23.682857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:23.682898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:23.682937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:23.683203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:23.829799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.830778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.830914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:23.831930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2:17:24.403317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:24.403339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:24.404607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.404658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:24.404688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:24.405875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.405907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.405947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.406001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.406092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:24.407254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:24.407387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:24.407612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:24.407704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:24.407750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.407934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:24.407975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.408094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:24.408152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:24.409547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:24.409586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:24.409719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:24.409786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:24.410085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.410149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:24.410289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:24.410324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.410358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:24.410387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.410416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:24.410453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.410484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:24.410519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:24.410589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:24.410625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:24.410665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:24.411297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:24.411393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:24.411426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:24.411459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:24.411498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:24.411588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:24.414228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:24.414690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1768047444.415578 287527 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-10T12:17:24.415841Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:17:24.416628Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:17:24.418970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:24.419294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.419445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-10T12:17:24.419862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1768047444 seconds (20463 days, 56 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2026-01-10T12:17:24.420500Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:24.423377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1768047444 seconds (20463 days, 56 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:24.423620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1768047444 seconds (20463 days, 56 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:24.424065Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:23.997250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:23.997333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.997367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:23.997403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:23.997439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:23.997467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:23.997555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:23.997635Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:23.998346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:23.998599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:24.081726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:24.081789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:24.097098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:24.097213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:24.097407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:24.105756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:24.105983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:24.106721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:24.106950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:24.109459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:24.109652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:24.110811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:24.110874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:24.110943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:24.110984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:24.111034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:24.111317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.255575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.258953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:24.259497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
peration type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:24.820780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:24.820972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:17:24.821209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.821277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:24.821324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:24.821363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:24.823301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.823368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:24.823410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:24.825226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.825288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.825351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.825431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.825572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:24.827230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:24.827391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000037 2026-01-10T12:17:24.827781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:24.827914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:24.827968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.828319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:24.828395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:24.828563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:24.828644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:24.830612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:24.830666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:24.830895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:24.830963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:24.831351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.831413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:24.831589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:24.831634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.831674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:24.831708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.831748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:24.831793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:24.831847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:24.831894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:24.831965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:24.832031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:24.832101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:24.832746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:24.832888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:24.832937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:24.833006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:24.833055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:24.833149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:24.836319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:24.836888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:24.838345Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:17:24.839501Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:17:24.842893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:24.843242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: 
TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:24.843712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2026-01-10T12:17:24.844577Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:24.847746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:24.847981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-10T12:17:24.848506Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpQueryPerf::RangeRead+QueryService >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink >> TColumnShardTestSchema::ColdTiers [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: 2026-01-10T12:14:37.806205Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:14:37.949272Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:14:37.949347Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:14:37.949404Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:37.949465Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:14:37.996278Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:14:38.041866Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] 
Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "important_user" Generation: 1 Important: true } 2026-01-10T12:14:38.042943Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:14:38.045588Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:14:38.057845Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:14:38.058331Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1863381b-c9139d3f-9fc3bc27-32fb0559_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-10T12:14:38.148971Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.190523Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.216187Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.226829Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.276316Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.326308Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.366437Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.543376Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.602827Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.860554Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:38.872523Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No 
candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:39.180425Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:39.443826Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:39.468431Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:39.809470Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:40.112349Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:40.418692Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:40.728399Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:40.752673Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:41.050773Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-10T12:14:41.540463Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:41.856072Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:42.124565Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:42.405506Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:42.416066Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:42.775087Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.102924Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.404383Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.701453Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 
0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.942719Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:43.989789Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:44.382098Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:44.677835Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:45.021671Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:45.340462Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:45.460440Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:45.654294Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:45.924470Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:46.228011Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:46.532364Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:46.836050Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:46.860325Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:14:48.450738Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates f ... 
't have tx info 2026-01-10T12:17:18.681445Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:17:18.681523Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:18.681607Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927938 is [36:158:2176] sender: [36:159:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:184:2057] recipient: [36:14:2061] 2026-01-10T12:17:18.705909Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:18.706681Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1002 actor [36:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-10T12:17:18.707410Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [36:190:2142] 2026-01-10T12:17:18.710458Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:190:2142] 2026-01-10T12:17:19.648298Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:17:19.648806Z node 36 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|62b5340f-52997590-a5b8580e-9208a443_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:262:2057] recipient: [36:104:2137] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:265:2057] recipient: [36:264:2258] Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:267:2057] recipient: [36:264:2258] 2026-01-10T12:17:21.870323Z node 36 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:17:21.870392Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:17:21.870982Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:21.871049Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:17:21.871570Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] 
bootstrapping 0 [36:315:2259] 2026-01-10T12:17:21.896463Z node 36 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:17:21.897352Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [36:315:2259] 2026-01-10T12:17:21.923078Z node 36 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-10T12:17:21.951077Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:17:21.951246Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:339:2057] recipient: [36:14:2061] 2026-01-10T12:17:21.955907Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:21.960147Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-10T12:17:21.960473Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1003 actor [36:336:2310] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-10T12:17:22.380381Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:112:2057] recipient: [37:105:2138] 2026-01-10T12:17:22.429418Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:17:22.429478Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:17:22.429521Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:22.429570Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927938 is [37:157:2176] sender: [37:158:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:181:2057] recipient: [37:14:2061] 2026-01-10T12:17:22.449822Z node 37 :PERSQUEUE NOTICE: 
pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:22.450411Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1004 actor [37:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2026-01-10T12:17:22.450983Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:187:2142] 2026-01-10T12:17:22.453954Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [37:187:2142] 2026-01-10T12:17:23.511952Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:17:23.512512Z node 37 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|895b0918-7e108ad9-7324bbeb-683bdc42_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:259:2057] recipient: [37:103:2137] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:262:2057] recipient: [37:261:2256] Leader for TabletID 72057594037927937 is [37:263:2257] sender: [37:264:2057] recipient: [37:261:2256] 2026-01-10T12:17:25.736768Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:17:25.736863Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:17:25.737387Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:25.737440Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:17:25.738051Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:312:2257] 2026-01-10T12:17:25.767354Z node 37 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:17:25.768403Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [37:312:2257] 2026-01-10T12:17:25.806111Z node 37 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-10T12:17:25.832255Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:17:25.832367Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [37:263:2257] sender: [37:336:2057] recipient: [37:14:2061] 2026-01-10T12:17:25.837358Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:17:25.841076Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-10T12:17:25.841333Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1005 actor [37:333:2308] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:29.819970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:29.820102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.820158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:29.820207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:29.820255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:29.820299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:29.820346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.820410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:29.821160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:29.821382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:29.949725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:29.949798Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:29.950660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:29.965789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:29.966252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:29.966429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:29.973993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:29.974326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:29.975033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:29.975431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:29.978077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:29.978264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:29.979489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:29.979548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:29.979604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:29.979645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:29.979687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:29.979873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:29.986475Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] 
sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:30.167403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:30.167718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.167927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:30.167989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:30.168222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:30.168296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:30.170843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:30.171056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:30.171267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.171325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:30.171376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:30.171411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:30.173829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.173909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:30.173951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:30.175805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.175873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.175929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.176013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:30.180617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:30.182565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:30.182762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:30.183882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:30.184021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:30.184095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.184393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:30.184458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.184661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:30.184732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:30.186765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 2 2026-01-10T12:17:24.702280Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-10T12:17:24.702319Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-10T12:17:24.702357Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-10T12:17:24.702389Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-10T12:17:24.702419Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-10T12:17:24.704430Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.704576Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.704644Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:24.704690Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:17:24.704743Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:17:24.706313Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.706431Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.706478Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:24.706520Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:17:24.706581Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:17:24.708460Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-10T12:17:24.708568Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.708614Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:24.708655Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-10T12:17:24.708699Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:17:24.709404Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.709517Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:17:24.709560Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:17:24.709598Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-10T12:17:24.709637Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-10T12:17:24.709717Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:17:24.713092Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:24.713336Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:24.715380Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:17:24.715528Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:17:24.717440Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:17:24.717500Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:17:24.719720Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:17:24.719860Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.719910Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2681:4670] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:17:24.721509Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:17:24.721569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:17:24.721696Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:17:24.721734Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:17:24.721809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:17:24.721842Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:17:24.721914Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:17:24.721946Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:17:24.722050Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:17:24.722087Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-10T12:17:24.724744Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:17:24.724887Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.724939Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2684:4673] 2026-01-10T12:17:24.725320Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:17:24.725532Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:17:24.725603Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.725642Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2684:4673] 2026-01-10T12:17:24.725788Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 
2026-01-10T12:17:24.725822Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2684:4673] 2026-01-10T12:17:24.725959Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-10T12:17:24.726097Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:17:24.726204Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.726240Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2684:4673] 2026-01-10T12:17:24.726384Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:17:24.726422Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2684:4673] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047988.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047988.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047988.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047988.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047988.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047988.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046788.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047988.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148047988.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046788.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046788.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046788.000000s;Name=;Codec=}; 2026-01-10T12:16:28.429929Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:28.451390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:28.451598Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:28.458006Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:28.458225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:28.458425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:28.458518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:28.458583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:28.458661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:28.458732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:28.458805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:28.458889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:28.458959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.459025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:28.459103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:28.459177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:28.481696Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:28.481992Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 
2026-01-10T12:16:28.482043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:28.482228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.482370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:28.482466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:28.482505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:28.482581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:28.482645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:28.482699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:28.482731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:28.482882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.482948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:28.482981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:28.483004Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:28.483076Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:28.483124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:28.483170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:28.483204Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:28.483240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:28.483267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:28.483288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:28.483323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:28.483348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:28.483404Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:28.483596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:28.483646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:28.483684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:28.483807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:28.483845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.483867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.483905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:28.483948Z node 1 :TX_COLUMNSHARD WARN: l ... 
ames=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:26.452207Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 9 at tablet 9437184 2026-01-10T12:17:26.452493Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047436579:max} readable: {1768047436579:max} at tablet 9437184 2026-01-10T12:17:26.452620Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:17:26.452800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047436579:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:26.452904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047436579:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-10T12:17:26.453443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047436579:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-10T12:17:26.455269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047436579:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:17:26.456296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047436579:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:963:2908];trace_detailed=; 2026-01-10T12:17:26.456787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-10T12:17:26.457066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-10T12:17:26.457312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:26.457478Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:26.457786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:26.457950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:26.458080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:26.458290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:963:2908] finished for tablet 9437184 2026-01-10T12:17:26.458774Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:962:2907];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":59962834,"name":"_full_task","f":59962834,"d_finished":0,"c":0,"l":59964967,"d":2133},"events":[{"name":"bootstrap","f":59963115,"d_finished":1008,"c":1,"l":59964123,"d":1008},{"a":59964381,"name":"ack","f":59964381,"d_finished":0,"c":0,"l":59964967,"d":586},{"a":59964362,"name":"processing","f":59964362,"d_finished":0,"c":0,"l":59964967,"d":605},{"name":"ProduceResults","f":59963816,"d_finished":589,"c":2,"l":59964720,"d":589},{"a":59964729,"name":"Finish","f":59964729,"d_finished":0,"c":0,"l":59964967,"d":238}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:26.458891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:962:2907];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:26.459441Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:962:2907];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":59962834,"name":"_full_task","f":59962834,"d_finished":0,"c":0,"l":59965572,"d":2738},"events":[{"name":"bootstrap","f":59963115,"d_finished":1008,"c":1,"l":59964123,"d":1008},{"a":59964381,"name":"ack","f":59964381,"d_finished":0,"c":0,"l":59965572,"d":1191},{"a":59964362,"name":"processing","f":59964362,"d_finished":0,"c":0,"l":59965572,"d":1210},{"name":"ProduceResults","f":59963816,"d_finished":589,"c":2,"l":59964720,"d":589},{"a":59964729,"name":"Finish","f":59964729,"d_finished":0,"c":0,"l":59965572,"d":843}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:963:2908]->[1:962:2907] 2026-01-10T12:17:26.459553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:26.455239Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:17:26.459602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:26.459727Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::Update-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047982.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047982.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047982.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047982.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047982.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047982.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046782.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047982.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148047982.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046782.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046782.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046782.000000s;Name=;Codec=}; 2026-01-10T12:16:22.711770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:22.736020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:22.736250Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:22.742149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:22.742365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:22.742543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:22.742625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:22.742703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:22.742779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:22.742849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:22.742926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:22.743020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:22.743088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:22.743152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:22.743220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:22.743306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:22.764910Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:22.765271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:22.765348Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:22.765514Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:22.765676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:22.765765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:22.765801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:22.765866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:22.765908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:22.765939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:22.765960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:22.766081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:22.766120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:22.766149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:16:22.766170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:22.766231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:22.766278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:22.766318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:22.766341Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:22.766388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:22.766429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:22.766454Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:22.766488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:22.766514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:22.766534Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:22.766673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:22.766707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:22.766728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:22.766812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:22.766843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:22.766863Z 
node 1 :TX_ ... ; 2026-01-10T12:17:26.438909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=101; 2026-01-10T12:17:26.438947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8304; 2026-01-10T12:17:26.438992Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8419; 2026-01-10T12:17:26.439053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-10T12:17:26.439135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=36; 2026-01-10T12:17:26.439171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9068; 2026-01-10T12:17:26.439307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=89; 2026-01-10T12:17:26.439420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2026-01-10T12:17:26.439538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=79; 2026-01-10T12:17:26.439642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=67; 2026-01-10T12:17:26.444583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4885; 2026-01-10T12:17:26.449278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4610; 2026-01-10T12:17:26.449371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:17:26.449422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:17:26.449464Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:26.449543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-10T12:17:26.449585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:17:26.449677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2026-01-10T12:17:26.449719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:17:26.449794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2026-01-10T12:17:26.449876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2026-01-10T12:17:26.450209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=298; 2026-01-10T12:17:26.450251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29173; 2026-01-10T12:17:26.450378Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:26.450485Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:26.450554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:26.450636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:26.470114Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:26.470264Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:26.470355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:17:26.470422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047132543;tx_id=18446744073709551615;;current_snapshot_ts=1768047384022; 2026-01-10T12:17:26.470466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:26.470512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:26.470550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:26.470638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:26.470874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.036000s; 2026-01-10T12:17:26.472739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:26.473037Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:26.473091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:26.473199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:17:26.473268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047132543;tx_id=18446744073709551615;;current_snapshot_ts=1768047384022; 2026-01-10T12:17:26.473330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:26.473375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:26.473414Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:26.473504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:26.473809Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.141000s; 2026-01-10T12:17:26.473863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::Update-QueryService+UseSink >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> KqpQueryPerf::RangeRead+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047986.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047986.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047986.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047986.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047986.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047986.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046786.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047986.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148047986.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046786.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046786.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046786.000000s;Name=;Codec=}; 2026-01-10T12:16:26.728468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:26.761026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:26.761247Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:26.768526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:26.768772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:26.769010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:26.769141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:26.769242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:26.769367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:26.769500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:26.769619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:26.769741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:26.769847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.769948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:26.770051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:26.770164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:26.802162Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:26.802509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:26.802572Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:26.802760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.802921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:26.803024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:26.803072Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:26.803174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:26.803260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:26.803318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:26.803371Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:26.803548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:26.803613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:26.803656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:26.803685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:26.803781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:26.803854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:26.803915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:26.803949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:26.804001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:26.804047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:26.804101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:26.804149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:26.804193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:26.804227Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:26.804421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:26.804470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:26.804501Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:26.804627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:26.804674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:26.804 ... 9; 2026-01-10T12:17:31.085679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=95; 2026-01-10T12:17:31.085717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8868; 2026-01-10T12:17:31.085759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8981; 2026-01-10T12:17:31.085822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:17:31.085896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=34; 2026-01-10T12:17:31.085934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9599; 2026-01-10T12:17:31.086066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=84; 2026-01-10T12:17:31.086198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=83; 2026-01-10T12:17:31.086321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=77; 2026-01-10T12:17:31.086418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2026-01-10T12:17:31.092159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5675; 2026-01-10T12:17:31.097477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5193; 2026-01-10T12:17:31.097588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:17:31.097642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:17:31.097682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:31.097762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=47; 2026-01-10T12:17:31.097809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:17:31.097900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2026-01-10T12:17:31.097944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:17:31.098004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-10T12:17:31.098095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2026-01-10T12:17:31.098413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=269; 
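
Editor's note: the shard-initialization trace above reports per-stage timings as ';'-separated key=value fields (e.g. "EXECUTE:storages_managerLoadingTime=4610" inside a line that also carries "tablet_id=..."). A minimal parsing sketch, assuming only that layout and treating the reported values as opaque durations (their unit is not stated in the log); file path and function names are illustrative, not part of the test tooling:

    import re
    import sys
    from collections import defaultdict

    # Matches "PRECHARGE:<stage>LoadingTime=<n>" or "EXECUTE:<stage>LoadingTime=<n>".
    LOADING = re.compile(r'(?:PRECHARGE|EXECUTE):(\w+)LoadingTime=(\d+)')
    TABLET = re.compile(r'tablet_id=(\d+)')

    def slowest_stages(path, top=10):
        # Accumulate reported loading times per (tablet, stage) pair.
        totals = defaultdict(int)
        with open(path, encoding='utf-8', errors='replace') as fh:
            for line in fh:
                tablet = TABLET.search(line)
                for stage, value in LOADING.findall(line):
                    key = (tablet.group(1) if tablet else '?', stage)
                    totals[key] += int(value)
        return sorted(totals.items(), key=lambda kv: kv[1], reverse=True)[:top]

    if __name__ == '__main__':
        for (tablet, stage), total in slowest_stages(sys.argv[1]):
            print(f'tablet {tablet}: {stage} = {total}')

Run against a saved copy of this log, it surfaces the dominant stages (here, storages_manager and the granule loads within composite_init) without scrolling through the trace.
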
2026-01-10T12:17:31.098452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31314; 2026-01-10T12:17:31.098566Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:31.098660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:31.098710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:31.098770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:31.119404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:31.119545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:31.119635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:17:31.119699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047136556;tx_id=18446744073709551615;;current_snapshot_ts=1768047388035; 2026-01-10T12:17:31.119740Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:31.119785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.119822Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.119909Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:31.120170Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.177000s; 2026-01-10T12:17:31.121785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:31.121918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:31.121991Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:31.122083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-10T12:17:31.122145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047136556;tx_id=18446744073709551615;;current_snapshot_ts=1768047388035; 2026-01-10T12:17:31.122188Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:31.122231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.122268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.122356Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:31.122733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.136000s; 2026-01-10T12:17:31.122777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2033:3853];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> 
KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168048002.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=168048002.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148048002.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148048002.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046802.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=148046802.000000s;Name=;Codec=}; 2026-01-10T12:16:43.941616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:43.971670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:43.971940Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:43.978274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:43.978433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:43.978574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:43.978648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:43.978724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:43.978795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:43.978854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:43.978912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:43.979007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:43.979077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:43.979140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:43.979194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:43.979256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:43.998469Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:43.998838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:43.998893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:43.999084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:43.999255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:43.999346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:43.999417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:43.999520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:43.999592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:43.999625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:43.999645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:43.999787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:43.999846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:43.999873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:43.999894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:43.999959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:44.000001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:44.000035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:44.000054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:44.001073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:44.001160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:44.001215Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:44.001262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:44.001307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:44.001332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:44.001529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:44.001585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:44.001607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:44.001718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:44.001751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:44.001772Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:44.001811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:44.001841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:44.001862Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:44.001896Z node 1 :TX_COLUM ... rd;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-10T12:17:31.644268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=89; 2026-01-10T12:17:31.644302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3474; 2026-01-10T12:17:31.644344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3617; 2026-01-10T12:17:31.644397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:17:31.644460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=26; 2026-01-10T12:17:31.644503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4144; 2026-01-10T12:17:31.644629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=73; 2026-01-10T12:17:31.644732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2026-01-10T12:17:31.644859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=86; 2026-01-10T12:17:31.644957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=60; 2026-01-10T12:17:31.646922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1906; 2026-01-10T12:17:31.648994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1962; 2026-01-10T12:17:31.649060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-10T12:17:31.649110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-10T12:17:31.649146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:31.649213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2026-01-10T12:17:31.649250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:17:31.649346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2026-01-10T12:17:31.649383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:17:31.649443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-10T12:17:31.649516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 
2026-01-10T12:17:31.649759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=210; 2026-01-10T12:17:31.649798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18602; 2026-01-10T12:17:31.649937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:31.650038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:31.650086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:31.650145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:31.660565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:31.660694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:31.660803Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:31.660869Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047141472;tx_id=18446744073709551615;;current_snapshot_ts=1768047429359; 2026-01-10T12:17:31.660912Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:31.660961Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.661012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.661093Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:31.661286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.196000s; 2026-01-10T12:17:31.663174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 
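
Editor's note: interleaved with these shard traces, the runner emits test verdicts as ">> Suite::Test [GOOD]" markers and flags broken build/download steps with "{..., FAILED}" in the progress lines. A minimal tally sketch under that assumption (regexes and names are illustrative, not part of ya or the test framework):

    import re
    import sys
    from collections import Counter

    # ">> KqpQueryPerf::Delete+QueryService-UseSink [GOOD]" style verdict markers.
    VERDICT = re.compile(r'>> (\S+) \[(\w+)\]')
    # "{BAZEL_DOWNLOAD, FAILED} <target>" style failed build/download steps.
    STEP_FAILED = re.compile(r'\{[^}]*FAILED[^}]*\}\s+(\S+)')

    def summarize(path):
        verdicts = Counter()
        failed_steps = set()
        with open(path, encoding='utf-8', errors='replace') as fh:
            for line in fh:
                for _name, status in VERDICT.findall(line):
                    verdicts[status] += 1
                for target in STEP_FAILED.findall(line):
                    failed_steps.add(target)
        return verdicts, failed_steps

    if __name__ == '__main__':
        verdicts, failed_steps = summarize(sys.argv[1])
        print('verdicts:', dict(verdicts))
        print('failed build/download steps:', len(failed_steps))

This gives a quick pass/fail overview of a run like this one; the per-target report.json produced by the build is the authoritative source, this is only a log-side cross-check.
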
2026-01-10T12:17:31.663259Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:31.663312Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:31.663411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:31.663481Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047141472;tx_id=18446744073709551615;;current_snapshot_ts=1768047429359; 2026-01-10T12:17:31.663523Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:31.663575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.663610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:31.663694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:31.664258Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.121000s; 2026-01-10T12:17:31.664311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29053, MsgBus: 7293 2026-01-10T12:17:25.340558Z 
node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705954688970180:2230];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.340639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.601722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.671934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.672103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.674158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:25.747951Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705954688969986:2081] 1768047445329672 != 1768047445329675 2026-01-10T12:17:25.752620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.789589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:25.816630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:25.816654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:25.816664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:25.816748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:26.235779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.285026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.339137Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.426700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.602467Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.679928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.528270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705967573873751:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.528406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.528730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705967573873761:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.528792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.850231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.878916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.908524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.937999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.966707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.001616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.035839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.086725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.163745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705971868841929:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.163819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.163885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705971868841934:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.164165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705971868841936:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.164216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.167769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.177988Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705971868841937:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:29.259172Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705971868841989:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.339788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705954688970180:2230];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.339842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12513, MsgBus: 3117 2026-01-10T12:17:25.295988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705955480424795:2099];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.297236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.597952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.604957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.605076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.607829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:25.703544Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.786128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:25.786155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:25.786164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:25.786267Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:25.815768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.197582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.206639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:26.267488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.320298Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.400918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.549161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.607595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.733940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705968365328489:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.734081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.734427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705968365328499:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.734477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.044567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.074013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.106734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.135067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.165491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.196657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.228431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.287543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.351960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972660296670:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.352016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.352144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972660296675:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.352183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972660296676:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.352202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.355889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.366126Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705972660296679:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:29.429396Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705972660296730:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.297623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705955480424795:2099];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.297689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestSchema::RebootColdTiers [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:29.910966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:29.911062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.911104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:29.911147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:29.911226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:29.911284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:29.911361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:29.911451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:29.912497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:29.912849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:30.054894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new 
config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:30.054987Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:30.056016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:30.073906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:30.074543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:30.074738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:30.084148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:30.084591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:30.085507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:30.086033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:30.089342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:30.089573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:30.090988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:30.091051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:30.091098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:30.091140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:30.091176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:30.091335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.099334Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:30.291505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:30.291783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: 
TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.292009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:30.292071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:30.292400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:30.292494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:30.296022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:30.296285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:30.296521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.296623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:30.296691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:30.296734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:30.299957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.300040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:30.300124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:30.302084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.302138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:30.302202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.302271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:30.313935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:30.316608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:30.316832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:30.318050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:30.318198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:30.318251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.318591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:30.318662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:30.318846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:30.318926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:30.321231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 4 2026-01-10T12:17:30.404532Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-10T12:17:30.404569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-10T12:17:30.404607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-10T12:17:30.404638Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-10T12:17:30.404669Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-10T12:17:30.406910Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.407007Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.407044Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:30.407079Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-10T12:17:30.407115Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-10T12:17:30.408894Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.409036Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.409085Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:30.409134Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-10T12:17:30.409182Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-10T12:17:30.411097Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-01-10T12:17:30.411217Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.411260Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:30.411303Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-10T12:17:30.411349Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-10T12:17:30.413255Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.413429Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:30.413486Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:30.413529Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-10T12:17:30.413575Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-01-10T12:17:30.413683Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-10T12:17:30.418823Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:30.419271Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:30.422062Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:30.422291Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-10T12:17:30.424268Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-01-10T12:17:30.424326Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-10T12:17:30.426536Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-10T12:17:30.426710Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.426761Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:5322:6874] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-10T12:17:30.428484Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-10T12:17:30.428546Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-10T12:17:30.428651Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-10T12:17:30.428686Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-10T12:17:30.428762Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-10T12:17:30.428796Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-10T12:17:30.428873Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-10T12:17:30.428904Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-10T12:17:30.428991Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-10T12:17:30.429024Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-10T12:17:30.431714Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-10T12:17:30.431983Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-10T12:17:30.432140Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.432203Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:5325:6877] 2026-01-10T12:17:30.432551Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-10T12:17:30.432652Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.432692Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:5325:6877] 2026-01-10T12:17:30.432954Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 
72057594046678944 2026-01-10T12:17:30.433107Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.433148Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:5325:6877] 2026-01-10T12:17:30.433347Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-10T12:17:30.433460Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.433500Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:5325:6877] 2026-01-10T12:17:30.433704Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-10T12:17:30.433743Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:5325:6877] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21879, MsgBus: 21752 2026-01-10T12:17:25.853372Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705955492280571:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.853453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:26.091976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:26.133786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:26.133897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:26.150888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:26.249427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:26.257402Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705955492280542:2081] 1768047445851755 != 1768047445851758 2026-01-10T12:17:26.307646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:26.307669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:26.307677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:26.307770Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:17:26.328172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.789866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.804543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:26.857706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.865980Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.999377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.149736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.212848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.033150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972672151605:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.033266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.033781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972672151615:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.033841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.404152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.437402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.466491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.496322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.528805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.577618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.610927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.649629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.711585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972672152487:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.711653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.711657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972672152492:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.711856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972672152494:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.711942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.715229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.725859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705972672152495:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:29.799475Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705972672152547:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.853530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705955492280571:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.853598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10059, MsgBus: 22618 2026-01-10T12:17:26.699321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705961579016890:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:26.699368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:26.994481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:27.017198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:27.017301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:27.018738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:27.031172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:27.122226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:27.122248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:27.122275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:27.122376Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:27.251498Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:27.530808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:27.595609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.704895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:27.716651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.836560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.900169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.538888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705974463920617:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.539040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.539345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705974463920627:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.539406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.859068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.889939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.921700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.952726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.980347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.011572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.058402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.096234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.161240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705978758888793:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.161348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.161543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705978758888798:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.161579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.161603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705978758888799:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.165004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:30.174725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705978758888802:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:30.272663Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705978758888853:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:31.700516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705961579016890:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:31.702707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047990.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047990.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047990.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047990.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047990.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168047990.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046790.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047990.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148047990.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046790.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046790.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046790.000000s;Name=;Codec=}; 2026-01-10T12:16:30.504029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:30.542376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:30.542662Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:30.551095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:30.551459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:30.551767Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:30.551947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:30.552118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:30.552283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:30.552412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:30.552547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:30.552694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:30.552818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.552960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:30.553079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:30.553209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:30.593743Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:30.594033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:30.594099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:30.594235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:30.594369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:30.594440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:30.594475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:30.594577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:30.594634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:30.594667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:30.594693Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:30.594856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:30.594917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:30.594969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:30.594991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:30.595075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:30.595134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:30.595174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:30.595198Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:30.595236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:30.595263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-10T12:16:30.595283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:30.595317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:30.595360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:30.595392Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:30.595652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:30.595717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:30.595756Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:30.595894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:30.595942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.595975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:30.596026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:30.596067Z node 1 :TX_COLUMNSHARD WARN: l ... 
=12; 2026-01-10T12:17:33.092672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=83; 2026-01-10T12:17:33.092706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3089; 2026-01-10T12:17:33.092748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3187; 2026-01-10T12:17:33.092801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:17:33.092909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=27; 2026-01-10T12:17:33.092949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3756; 2026-01-10T12:17:33.093112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=100; 2026-01-10T12:17:33.093226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2026-01-10T12:17:33.093378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=89; 2026-01-10T12:17:33.093498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=70; 2026-01-10T12:17:33.095532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1980; 2026-01-10T12:17:33.097741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2130; 2026-01-10T12:17:33.097819Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:17:33.097869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-10T12:17:33.097911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-10T12:17:33.098042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2026-01-10T12:17:33.098114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2026-01-10T12:17:33.098239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2026-01-10T12:17:33.098290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:17:33.098368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2026-01-10T12:17:33.098460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-10T12:17:33.098774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=280; 2026-01-10T12:17:33.098811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18680; 2026-01-10T12:17:33.098956Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:33.099083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:33.099132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:33.099200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:33.119370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:33.119507Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:33.119583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 
2026-01-10T12:17:33.119631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047140138;tx_id=18446744073709551615;;current_snapshot_ts=1768047428025; 2026-01-10T12:17:33.119668Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:33.119719Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:33.119759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:33.119846Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:33.120043Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.070000s; 2026-01-10T12:17:33.121678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:33.122145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:33.122185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:33.122252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:33.122307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047140138;tx_id=18446744073709551615;;current_snapshot_ts=1768047428025; 2026-01-10T12:17:33.122340Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:33.122369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:33.122394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:33.122463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:33.122666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.142000s; 2026-01-10T12:17:33.122702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9636, MsgBus: 61557 2026-01-10T12:17:26.478711Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705959503599665:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:26.479081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:26.720246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:26.727927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:26.728064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:26.816589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:26.852919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705959503599563:2081] 1768047446474132 != 1768047446474135 2026-01-10T12:17:26.855921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:26.896150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.976359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:26.976380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:26.976395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:26.976504Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:27.359695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:27.415630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.486453Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:27.533319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.673474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.727791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.815760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972388503322:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.815894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.816233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705972388503332:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.816296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.114843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.148143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.178192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.208355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.237844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.270942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.309050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.359344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.439850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705976683471501:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.439944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.440265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705976683471507:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.440326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705976683471506:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.440366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.443868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:30.455813Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705976683471510:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:30.528168Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705976683471563:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:31.477615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705959503599665:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:31.477686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::Upsert-QueryService+UseSink >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpQueryPerf::Replace-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7119, MsgBus: 11866 2026-01-10T12:17:27.932419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705962950370245:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:27.937883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:28.193050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:28.193165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:28.222903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:28.238003Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705962950370215:2081] 1768047447929801 != 1768047447929804 2026-01-10T12:17:28.254057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:28.262015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:28.360735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:28.360756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:28.360772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:28.360859Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:28.388219Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:28.740054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:28.796136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.911037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.996638Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:29.034879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.106713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.135874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980130241271:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.135991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.136341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980130241281:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.136393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.513409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.546774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.578372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.609478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.640883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.680771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.714956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.767241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.841024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980130242151:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.841117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.841165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980130242156:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.841452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980130242158:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.841511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.845220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:31.857218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705980130242159:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:31.959510Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705980130242211:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:32.933566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705962950370245:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:32.933639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> KqpQueryPerf::Delete-QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24925, MsgBus: 61999 2026-01-10T12:17:28.223661Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705968027637200:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:28.224228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:28.476882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:28.477043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:28.523422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:28.570102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:28.570802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:28.573287Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705968027637172:2081] 1768047448221812 != 1768047448221815 2026-01-10T12:17:28.636764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:28.636789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:28.636796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:28.636939Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:28.803196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:29.062410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:29.074619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:29.122952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.237284Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:29.263267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.438606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.502881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.242163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980912540931:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.242258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.242698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705980912540943:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.242745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.644304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.677435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.721785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.755818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.790902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.823439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.883107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.928970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.004257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705985207509108:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.004340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.004523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705985207509113:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.004567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705985207509115:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.004610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.009125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:32.022367Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705985207509117:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:32.114420Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705985207509168:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:33.223849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705968027637200:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:33.223929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047987.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047987.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046787.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-10T12:16:28.812118Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:28.835457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:28.835718Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:28.849166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:28.849429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:28.849683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:28.849816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:28.849945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:28.850083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:28.850208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:28.850348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:28.850492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:28.850622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.850733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:28.850839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:28.850953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:28.880013Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:28.880387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:28.880452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:28.880625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.880819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:28.880910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:28.880957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:28.881071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:28.881149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:28.881202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:28.881236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:28.881397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:28.881459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:28.881498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:28.881531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:28.881627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:28.881682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:28.881729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:28.881760Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:28.881811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:28.881868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-10T12:16:28.881913Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:28.881965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:28.882004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:28.882038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:28.882243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:28.882315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:28.882350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:28.882465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:28.882509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.882539Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:28.882586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:28.882630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:28.882685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:28.882753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
ent=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:35.391008Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:2254;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.391047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:17:35.391148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=2254; 2026-01-10T12:17:35.391183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=2254;batch_columns=timestamp; 2026-01-10T12:17:35.391361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1235:3173];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1236:3174]->[1:1235:3173] 2026-01-10T12:17:35.391483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.391579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.391669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.391782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:17:35.391849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.391918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.392111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1236:3174] finished for tablet 9437184 2026-01-10T12:17:35.392650Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1235:3173];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.031},{"events":["l_task_result"],"t":0.166},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.167}],"full":{"a":68306257,"name":"_full_task","f":68306257,"d_finished":0,"c":0,"l":68474104,"d":167847},"events":[{"name":"bootstrap","f":68306463,"d_finished":792,"c":1,"l":68307255,"d":792},{"a":68473710,"name":"ack","f":68337576,"d_finished":57841,"c":71,"l":68473634,"d":58235},{"a":68473694,"name":"processing","f":68307348,"d_finished":124753,"c":143,"l":68473636,"d":125163},{"name":"ProduceResults","f":68306956,"d_finished":100690,"c":216,"l":68473877,"d":100690},{"a":68473881,"name":"Finish","f":68473881,"d_finished":0,"c":0,"l":68474104,"d":223},{"name":"task_result","f":68307359,"d_finished":64681,"c":72,"l":68472695,"d":64681}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:17:35.392741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1235:3173];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:17:35.393094Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1235:3173];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.031},{"events":["l_task_result"],"t":0.166},{"events":["l_ProduceResults","f_Finish"],"t":0.167},{"events":["l_ack","l_processing","l_Finish"],"t":0.168}],"full":{"a":68306257,"name":"_full_task","f":68306257,"d_finished":0,"c":0,"l":68474724,"d":168467},"events":[{"name":"bootstrap","f":68306463,"d_finished":792,"c":1,"l":68307255,"d":792},{"a":68473710,"name":"ack","f":68337576,"d_finished":57841,"c":71,"l":68473634,"d":58855},{"a":68473694,"name":"processing","f":68307348,"d_finished":124753,"c":143,"l":68473636,"d":125783},{"name":"ProduceResults","f":68306956,"d_finished":100690,"c":216,"l":68473877,"d":100690},{"a":68473881,"name":"Finish","f":68473881,"d_finished":0,"c":0,"l":68474724,"d":843},{"name":"task_result","f":68307359,"d_finished":64681,"c":72,"l":68472695,"d":64681}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1236:3174]->[1:1235:3173] 2026-01-10T12:17:35.393166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:17:35.223271Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10565848;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10565848;selected_rows=0; 2026-01-10T12:17:35.393194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:17:35.393319Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system 
>> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18516, MsgBus: 3979 2026-01-10T12:17:29.672644Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705971433749146:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:29.672754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:29.876819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:29.899075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:29.899190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:29.925084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:30.004118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:30.007245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705971433749117:2081] 1768047449670837 != 1768047449670840 2026-01-10T12:17:30.060511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:30.060528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:30.060533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:30.060596Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:30.161956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:30.482837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:30.543029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.679717Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:30.686775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.840550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.902638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.837304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705984318652875:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.837418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.838420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705984318652885:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:32.838479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.197919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.232531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.263414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.291504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.323306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.358426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.416496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.458396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.536379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705988613621050:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.536462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.536686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705988613621056:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.536718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.536717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705988613621055:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.539864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:33.550454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705988613621059:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:33.618321Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705988613621110:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:34.672762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705971433749146:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:34.672847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=168047991.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047991.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047991.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047991.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047991.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168047991.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=168047991.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046791.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148047991.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148047991.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046791.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046791.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=148046791.000000s;Name=;Codec=}; 2026-01-10T12:16:32.358188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:32.380874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:32.381071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:32.387591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:32.387776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:32.387972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:32.388058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:32.388170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:32.388276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:32.388378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:32.388498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:32.388598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:32.388670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:32.388734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:32.388797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:32.388873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:32.413137Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-10T12:16:32.413528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:32.413592Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:32.413774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:32.413979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:32.414070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:32.414117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:32.414197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:32.414257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:32.414299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:32.414323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:32.414453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:32.414511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:32.414554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:32.414591Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:32.414696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:32.414773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:32.414847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:32.414883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:32.414944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:32.414983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:32.415006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:32.415062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:32.415090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:32.415112Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:32.415245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:32.415296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:32.415314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:32.415399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:32.415426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:1 ... 
026-01-10T12:17:36.478262Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=114; 2026-01-10T12:17:36.478306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9310; 2026-01-10T12:17:36.478352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9431; 2026-01-10T12:17:36.478414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-10T12:17:36.478500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=40; 2026-01-10T12:17:36.478537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10110; 2026-01-10T12:17:36.478691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=101; 2026-01-10T12:17:36.478847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=100; 2026-01-10T12:17:36.479018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=107; 2026-01-10T12:17:36.479162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=93; 2026-01-10T12:17:36.484950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5700; 2026-01-10T12:17:36.489929Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4848; 2026-01-10T12:17:36.490056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-10T12:17:36.490113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:17:36.490159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:36.490247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2026-01-10T12:17:36.490298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:17:36.490382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=49; 2026-01-10T12:17:36.490426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:17:36.490512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=48; 2026-01-10T12:17:36.490626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=69; 2026-01-10T12:17:36.490983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=312; 2026-01-10T12:17:36.491033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=32118; 2026-01-10T12:17:36.491174Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:36.491297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:36.491357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:36.491429Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:36.510659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:36.510825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:36.510919Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 
2026-01-10T12:17:36.510992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047142191;tx_id=18446744073709551615;;current_snapshot_ts=1768047393670; 2026-01-10T12:17:36.511038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:36.511088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:36.511125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:36.511214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:36.511453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.183000s; 2026-01-10T12:17:36.512964Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:36.519367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:36.519430Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:36.519532Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-10T12:17:36.519599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047142191;tx_id=18446744073709551615;;current_snapshot_ts=1768047393670; 2026-01-10T12:17:36.519636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:36.519683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:36.519722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:36.519798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:36.520210Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.100000s; 2026-01-10T12:17:36.520252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31260, MsgBus: 2283 2026-01-10T12:17:30.297348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705977167799572:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.297392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:30.546176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:30.546304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:30.589591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:30.609157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:30.664824Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705977167799543:2081] 1768047450295157 != 1768047450295160 2026-01-10T12:17:30.665295Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:30.731645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-10T12:17:30.731696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:30.731704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:30.731796Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:30.811948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:31.129056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:31.201325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.309880Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:31.331746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.500241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.567992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.426360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705990052703312:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.426500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.426961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705990052703322:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.427010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.797113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.830262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.859240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.885937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.911750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.944772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.009522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.053805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.132434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705994347671487:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.132533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.132535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705994347671492:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.132791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705994347671494:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.132830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.136899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:34.149071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705994347671495:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:34.222151Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705994347671547:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:35.298339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705977167799572:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:35.298415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3429, MsgBus: 14018 2026-01-10T12:17:24.422431Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705952811753111:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:24.422505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:24.614959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:24.641677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:24.641808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:24.677426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:24.724072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705952811753086:2081] 1768047444421707 != 1768047444421710 2026-01-10T12:17:24.729291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:24.767921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:24.775790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:24.775827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:24.775838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:24.775956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:17:25.160602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:25.217727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:25.357208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:25.460416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:25.523525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:25.595574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.502908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705965696656845:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:27.503017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:27.503338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705965696656855:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:27.503403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:27.799575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.827646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.855138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.882602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.909093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.942943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.009454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.049031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.126266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969991625019:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.126353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.126714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969991625025:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.126763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969991625024:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.126809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.131467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:28.141356Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705969991625028:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:28.222651Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705969991625079:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:29.422434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705952811753111:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:29.422490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31121, MsgBus: 20273 2026-01-10T12:17:31.062930Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705981186425702:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:31.062993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:31.083792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:31.153239Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:31.154814Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705981186425674:2081] 1768047451062012 != 1768047451062015 2026-01-10T12:17:31.185053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:31.185114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:31.189298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:31.232510Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:31.232527Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:31.232535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:31.232593Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:31.376985Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:31.572409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:31.586508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.643019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.837808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.903633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.068945Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:34.231185Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705994071329428:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.231269Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.231651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705994071329437:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.231709Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.301618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.333137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.366487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.396684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.429570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.490547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.529329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.577098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.654599Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705994071330309:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.654671Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705994071330314:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.654684Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.654919Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705994071330316:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.655059Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.658025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:34.669110Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705994071330317:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:34.722808Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705994071330369:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:36.063196Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705981186425702:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.063264Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::ComputeLength-QueryService [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:13.418826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:13.418918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.418957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:13.418995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:13.419021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:13.419043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:13.419107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:13.419201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:13.419920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:13.420175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:13.486223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to 
console configs 2026-01-10T12:17:13.486274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:13.497459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:13.497546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:13.497678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:13.503787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:13.503956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:13.504491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:13.504692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:13.506335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.506472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:13.507310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:13.507350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:13.507392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:13.507421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:13.507451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:13.507682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:13.610307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.610964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:13.611768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:37.850471Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:37.850507Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:37.850536Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:17:37.850573Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:37.850650Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:17:37.851517Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1252 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:37.851569Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:37.851716Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1252 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:37.851858Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1252 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:17:37.852564Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 77309414025 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:37.852612Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:37.852725Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 77309414025 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:37.852783Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:37.852885Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 711 RawX2: 77309414025 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:37.852952Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:37.852996Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:37.853045Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:37.853098Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:37.855308Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:37.857252Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:37.857363Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:37.857452Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:37.857704Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:37.857742Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:37.857844Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:37.857882Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:37.857921Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:37.857952Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:37.857988Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is 
published: true 2026-01-10T12:17:37.858048Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:740:2718] message: TxId: 101 2026-01-10T12:17:37.858095Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:37.858139Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:37.858169Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:37.858276Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:37.859848Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:37.859897Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:741:2719] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:37.860415Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:37.860589Z node 18 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 212us result status StatusSuccess 2026-01-10T12:17:37.861097Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 
0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18267, MsgBus: 3621 2026-01-10T12:17:25.120622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705955980754384:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.121664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.336197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.355278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.355367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.364155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:25.461921Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705955980754355:2081] 1768047445119115 != 1768047445119118 2026-01-10T12:17:25.472935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.511762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:25.585137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:25.585164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:25.585177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:25.585309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:25.971098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.035365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.127797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.140322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.288513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.357505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.389992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705968865658120:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.390105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.390429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705968865658129:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.390465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.728884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.761139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.795694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.829380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.860963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.896059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.927675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.001422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.073990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973160626296:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.074071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.074265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973160626301:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.074296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973160626302:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.074384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.078780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.088045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705973160626305:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:29.190176Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705973160626358:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.120861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705955980754384:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.120921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26370, MsgBus: 63070 2026-01-10T12:17:31.988462Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705980241217356:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:31.990408Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:32.040180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:32.094608Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:32.118572Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:32.118672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:32.138104Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:32.188612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:32.188636Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:32.188643Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:32.188721Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:32.288627Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:32.584803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:32.599948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.660648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.800614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:32.903490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.025362Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:35.375900Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705997421088344:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.375993Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.376284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705997421088353:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.376328Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.460565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.492589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.527222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.570638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.617409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.656651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.727460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.781609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.873887Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705997421089222:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.873996Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.874047Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705997421089227:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.874305Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705997421089229:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.874402Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:35.877778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:35.888558Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705997421089230:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:35.949884Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705997421089282:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:36.989532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705980241217356:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.990587Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14132, MsgBus: 20794 2026-01-10T12:17:26.441906Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705958629929117:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:26.441969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:26.681417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:26.693410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:26.693526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:26.778660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:26.813848Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:26.839337Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705958629929093:2081] 1768047446441264 != 1768047446441267 2026-01-10T12:17:26.862286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.942957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:26.942990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:26.942999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:26.943081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:17:27.310553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:27.370627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.455779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:27.474226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.615690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:27.680277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.527089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705971514832865:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.527206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.527501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705971514832875:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.527547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.832275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.865869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.891651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.921747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.951142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.981700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.010843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.079887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.148168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705975809801038:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.148232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.148282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705975809801043:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.148394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705975809801045:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.148423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:30.152783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:30.164172Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705975809801047:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:30.254845Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705975809801098:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:31.442209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705958629929117:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:31.442295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12903, MsgBus: 2123 2026-01-10T12:17:33.312062Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705989127804091:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:33.312137Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:33.325189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:33.382226Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:33.433725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:33.433823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:33.437462Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:33.500675Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:33.500700Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:33.500708Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:33.500783Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:33.600290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:33.869617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:33.886771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.933534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.103453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.157166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.318082Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:36.641238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706002012707811:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.641348Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.641608Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706002012707821:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.641676Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.718792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.750400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.788626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.816942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.880230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.920929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.968802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.018855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.112093Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706006307675987:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:37.112231Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:37.112322Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706006307675992:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:37.112466Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706006307675994:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:37.112555Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:37.116786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:37.131674Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706006307675995:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:37.235379Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706006307676047:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:38.316344Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705989127804091:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:38.316415Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 12382, MsgBus: 2372 2026-01-10T12:17:25.596449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705956291816775:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.596892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.869873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.869991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.888778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.909515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.912131Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705956291816672:2081] 1768047445577694 != 1768047445577697 2026-01-10T12:17:25.921004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:26.019760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:26.019784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:26.019792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:26.019907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:26.181685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.412234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.464543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.600760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.635742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.803733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.882598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.853553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969176720432:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.853696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.855316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969176720442:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.855439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.200618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.231204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.257969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.286937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.315910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.344724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.413135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.455022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.526191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973471688617:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.526293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.526570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973471688622:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.526621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705973471688623:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.526887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.530607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.542107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705973471688626:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:29.634224Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705973471688679:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.591490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705956291816775:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.591557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63899, MsgBus: 24355 2026-01-10T12:17:32.849205Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705984688252149:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:32.849392Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:32.874948Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:32.944703Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:32.947505Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705984688252111:2081] 1768047452847541 != 1768047452847544 2026-01-10T12:17:32.982094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:32.982185Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:32.990936Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:33.026306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:33.026329Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:33.026335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:33.026407Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:33.081962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:33.376168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:33.398148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.462774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.610761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.674597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.864213Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:36.011470Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706001868123165:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.011574Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.011843Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706001868123175:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.011881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.137348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.178137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.225703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.257016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.300498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.340552Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.381725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.470191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.580456Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706001868124049:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.580581Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.582593Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706001868124055:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.582637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706001868124054:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.582655Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:36.587259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:36.601086Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706001868124058:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:36.692561Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706001868124109:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:37.849710Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705984688252149:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:37.849780Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23212, MsgBus: 10157 2026-01-10T12:17:27.971018Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705964176466399:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:27.971092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:28.236189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:28.241997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:28.242108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:28.296709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:28.363941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:28.368297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705964176466374:2081] 1768047447970297 != 1768047447970300 2026-01-10T12:17:28.424838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:28.446384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:28.446410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:28.446418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:28.446545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:17:28.842435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:28.900576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.985499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:29.008056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.160450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.237865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.236381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705981356337430:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.236509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.236855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705981356337440:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.236927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.571540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.604742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.634674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.666227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.707362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.747051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.787325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.835956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.915309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705981356338308:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.915393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.915464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705981356338313:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.915856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705981356338316:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.915927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:31.919268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:31.934432Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705981356338315:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:32.010973Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705985651305666:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:32.971585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705964176466399:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:32.971666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9330, MsgBus: 1628 2026-01-10T12:17:34.788940Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593705993248751509:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:34.788997Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:34.802169Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:34.874703Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:34.915744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:34.915828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:34.917876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:34.970529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:34.970561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:34.970569Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:34.970655Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:35.024614Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:35.389103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:35.394990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-10T12:17:35.401082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.452857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.603061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.702780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.823584Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:38.173597Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706010428622492:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.173680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.174077Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706010428622501:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.174121Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.246523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.280416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.316318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.350041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.381574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.426359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.465696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.542974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.627528Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706010428623374:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.627843Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706010428623379:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.627848Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.628250Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706010428623382:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.628362Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.631773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:38.643932Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706010428623381:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:38.696589Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706010428623434:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:39.791875Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705993248751509:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:39.791946Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> KqpQueryPerf::MultiRead+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18848, MsgBus: 18609 2026-01-10T12:17:35.125482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705998065950171:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:35.125533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:35.401456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:35.401586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:35.447429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:35.454315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:35.512838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:35.514508Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705998065950142:2081] 1768047455121469 != 1768047455121472 2026-01-10T12:17:35.611031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:35.611052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:35.611061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:35.611145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:35.702147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:36.053272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:36.062461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:36.107876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.138143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:36.266936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.432621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.502493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.433010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706010950853902:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.433176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.433717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706010950853912:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.433783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.758853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.797577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.836460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.866152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.902407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.937446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.968616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.016910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.084956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015245822080:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.085047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.085265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015245822085:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.085269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015245822086:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.085298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.088844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:39.099827Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706015245822089:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:39.185102Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706015245822142:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:40.125578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705998065950171:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.125644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1403, MsgBus: 28690 2026-01-10T12:17:36.171667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706002749231066:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.171860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:36.392244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:36.456761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:36.456940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:36.481711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:36.504184Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706002749231041:2081] 1768047456155764 != 1768047456155767 2026-01-10T12:17:36.512334Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:36.568685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:36.568705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:36.568712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:36.568793Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:36.661598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:37.001191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:37.065804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.192200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:37.232070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.412420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.481428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.398197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015634134802:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.398350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.398853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015634134812:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.398975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.688590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.719957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.750294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.778392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.805878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.840866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.897736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.944113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.027280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706019929102980:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.027367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.027604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706019929102985:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.027658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706019929102986:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.027715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.032270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:40.044658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706019929102989:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:40.108473Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706019929103040:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:41.168637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706002749231066:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.168723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25586, MsgBus: 26356 2026-01-10T12:17:35.890717Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705998722294508:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:35.891608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:36.175296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:36.175407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:36.221380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:36.228182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705998722294404:2081] 1768047455881758 != 1768047455881761 2026-01-10T12:17:36.237878Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:36.238670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:36.359936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:36.359957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:36.359965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:36.360029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:36.452185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:17:36.799990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:36.807165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:36.863210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.915993Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:37.021432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.205050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.287065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.323124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015902165468:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.323311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.323794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015902165478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.323858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.606503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.636928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.667072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.693918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.722873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.765863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.803901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.861715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.942296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015902166353:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.942511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.942726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015902166358:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.942782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015902166360:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.942846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.946356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:39.961604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706015902166362:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:40.053368Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706020197133709:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:40.885831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705998722294508:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.885897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> Cdc::SequentialSplitMerge >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168047990.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148047990.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046790.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-10T12:16:31.547693Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:31.591410Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:31.591694Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:31.599926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:31.600220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2026-01-10T12:16:31.600484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:31.600639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:31.600756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:31.600911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:31.601051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:31.601175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:31.601339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:31.601483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.601606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:31.601724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:31.601850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:31.640538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:31.641015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:31.641082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:31.641275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:31.641466Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:31.641560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:31.641606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:31.641713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:31.641785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:31.641855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:31.641904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:31.642083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:31.642155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:31.642200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:31.642232Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:31.642334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:31.642391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:31.642439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:31.642472Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:31.642527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:31.642590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:31.642634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:31.642691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:31.642735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:31.642783Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:31.643011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:31.643066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:31.643098Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:31.643223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:31.643278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.643309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:31.643360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:31.643403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:31.643438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:31.643510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
mposite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-10T12:17:42.878680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=118; 2026-01-10T12:17:42.878719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9553; 2026-01-10T12:17:42.878765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9670; 2026-01-10T12:17:42.878834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:17:42.878906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=31; 2026-01-10T12:17:42.878948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10334; 2026-01-10T12:17:42.879099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=89; 2026-01-10T12:17:42.879226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2026-01-10T12:17:42.879346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=79; 2026-01-10T12:17:42.879443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=60; 2026-01-10T12:17:42.883017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3520; 2026-01-10T12:17:42.887252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4140; 2026-01-10T12:17:42.887385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2026-01-10T12:17:42.887443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:17:42.887524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:42.887607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2026-01-10T12:17:42.887666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:17:42.887742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-10T12:17:42.887798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=14; 2026-01-10T12:17:42.887862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-10T12:17:42.887940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2026-01-10T12:17:42.888188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=215; 2026-01-10T12:17:42.888232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26523; 2026-01-10T12:17:42.888352Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=21099992;raw_bytes=29608900;count=3;records=320000} evicted {blob_bytes=10565848;raw_bytes=16084450;count=1;records=160000} at tablet 9437184 2026-01-10T12:17:42.888492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:42.888542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:42.888604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:42.898655Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:42.898845Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:42.898935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:42.899011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047149297;tx_id=18446744073709551615;;current_snapshot_ts=1768047392703; 2026-01-10T12:17:42.899052Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:42.899098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:42.899132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:42.899212Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:42.899468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.001000s; 2026-01-10T12:17:42.900958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:42.901244Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:42.901317Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:42.901397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:17:42.901455Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047149297;tx_id=18446744073709551615;;current_snapshot_ts=1768047392703; 2026-01-10T12:17:42.901561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:42.901609Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:42.901645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:42.901741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:42.902196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.067000s; 2026-01-10T12:17:42.902279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1703:3538];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system >> KqpQueryPerf::Replace+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4598, MsgBus: 10537 2026-01-10T12:17:37.065912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706007062063247:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:37.066021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:37.333198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:37.339862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:37.340118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:37.391486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:37.485688Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:37.554298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:37.554331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:37.554339Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:37.554437Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:37.599352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:38.006476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:38.015116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:38.059248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.072600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:38.218962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.359866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.438198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.483244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706019946966963:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.483388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.483753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706019946966973:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.483801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.825738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.864625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.901159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.938171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.980592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.020340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.092601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.143420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.228896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706024241935139:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.229028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.229267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706024241935144:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.229275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706024241935146:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.229314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.233416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:41.244800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706024241935148:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:41.340371Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706024241935199:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:42.066228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706007062063247:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:42.066294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:33.303510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:33.303618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:33.303664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:33.304104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:33.304159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:33.304229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:33.304302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:33.304395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:33.305295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:33.305625Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:33.462738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:33.462812Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:33.463688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:33.493003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:33.493466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:33.493666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:33.520861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:33.521377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:33.522136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:33.522554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:33.546359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:33.546609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:33.547891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:33.547965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:33.548039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:33.548100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:33.548157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:33.548338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.563860Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:33.793093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:33.793568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.793880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:33.793958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:33.794620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:33.794757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:33.800306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:33.800572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:33.800837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.800901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:33.800946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:33.800994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:33.810643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.810727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:33.810768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:33.813116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.813181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:33.813223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:33.813284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:33.817099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:33.821784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:33.822033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:33.823171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:33.823337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:33.823392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:33.823711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:33.823771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:33.823952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:33.824054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:33.832437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-10T12:17:42.138686Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-10T12:17:42.138735Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-10T12:17:42.138784Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-10T12:17:42.138831Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-10T12:17:42.138871Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-10T12:17:42.141727Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.141884Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.141948Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:42.142004Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-10T12:17:42.142060Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-10T12:17:42.144138Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.144304Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.144368Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:42.144423Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-10T12:17:42.144485Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-10T12:17:42.146184Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-01-10T12:17:42.146318Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.146375Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:42.146422Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-10T12:17:42.146472Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-10T12:17:42.149314Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.149445Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:17:42.149508Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:17:42.149561Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-10T12:17:42.149616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 2 2026-01-10T12:17:42.149729Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-10T12:17:42.154312Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:42.154505Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:42.157365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:17:42.157602Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-10T12:17:42.159604Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-01-10T12:17:42.159667Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-10T12:17:42.161989Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-10T12:17:42.162157Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-10T12:17:42.162219Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:2940:4929] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-10T12:17:42.164037Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-10T12:17:42.164144Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-10T12:17:42.164280Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-10T12:17:42.164323Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-10T12:17:42.164412Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-10T12:17:42.164453Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-10T12:17:42.164575Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-10T12:17:42.164619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-10T12:17:42.164716Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-10T12:17:42.164755Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-10T12:17:42.167450Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-10T12:17:42.167671Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-10T12:17:42.167731Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:2943:4932] 2026-01-10T12:17:42.168280Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-10T12:17:42.168680Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-10T12:17:42.168734Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:2943:4932] 2026-01-10T12:17:42.168884Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-10T12:17:42.169158Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 
2026-01-10T12:17:42.169212Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:2943:4932] 2026-01-10T12:17:42.169356Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-10T12:17:42.169609Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-10T12:17:42.169658Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:2943:4932] 2026-01-10T12:17:42.169776Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-10T12:17:42.170051Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-10T12:17:42.170103Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:2943:4932] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17928, MsgBus: 25221 2026-01-10T12:17:25.503979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705956920733482:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.504050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.742455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.801530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.801672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.837794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:25.895006Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.980576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:25.980610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:25.980619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:25.980735Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:26.013991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:26.402093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.408562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:26.449050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.520372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:26.606835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.777172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:26.846097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.692179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969805637208:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.692306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.692615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705969805637218:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.692665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.044566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.075531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.111214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.149745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.179129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.209950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.242312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.292404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:29.370286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705974100605386:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.370417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.370581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705974100605391:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.370631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705974100605393:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.370922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:29.374725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:29.384410Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705974100605395:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:29.462811Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705974100605446:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.504466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705956920733482:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.504528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:31.200055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... :17:34.709620Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:34.709794Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705995840409579:2081] 1768047454591907 != 1768047454591910 2026-01-10T12:17:34.710580Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:34.735638Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:34.794886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:34.794908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:34.794913Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:34.794970Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:34.875473Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:35.194686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:35.206350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:17:35.265655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.422809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.487841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:35.674359Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:38.076953Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706013020280633:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.077042Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.077278Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706013020280643:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.077306Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.136821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.169646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.208821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.279979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.317231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.360224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.397240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.441696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.521616Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706013020281515:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.521690Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.521777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706013020281520:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.521852Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706013020281522:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.521901Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.524775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:38.533722Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706013020281524:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:38.624875Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706013020281575:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:39.592935Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705995840409606:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:39.593028Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:40.306115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.359065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.405175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7054, MsgBus: 62476 2026-01-10T12:17:30.109682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705977975156298:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.111606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:30.320271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:30.323417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:30.323592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:30.383345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:30.425263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles 
were not loaded 2026-01-10T12:17:30.428219Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705977975156245:2081] 1768047450104915 != 1768047450104918 2026-01-10T12:17:30.490322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:30.490351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:30.490358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:30.490489Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:30.605449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:30.902215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:30.959912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.074217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.176400Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:31.228664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.299266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.436869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705990860060008:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.436984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.437388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705990860060018:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.437462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.768966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.802784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.839265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.869557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.902245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.948557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.979285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.026112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:34.119531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705995155028194:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.119615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.119797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705995155028200:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.119827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705995155028199:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.119890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:34.123733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:34.133622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705995155028203:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:34.231442Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705995155028256:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:35.109333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705977975156298:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:35.109407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18859, MsgBus: 24755 2026-01-10T12:17:37.220864Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706007270382853:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:37.220925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:37.238931Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:37.360356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:37.363888Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:37.366354Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706007270382825:2081] 1768047457219290 != 1768047457219293 2026-01-10T12:17:37.374765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:37.374841Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:37.394561Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:37.497049Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:37.497073Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:37.497085Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:37.497190Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:37.612817Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:37.910830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:37.930060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.992818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.129508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.190982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.313065Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:40.697172Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706020155286579:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.697295Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.697608Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706020155286589:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.697672Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.754236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.786749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.818359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.851624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.918705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.952324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.994715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.044730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.133775Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706024450254759:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.133873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.134038Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706024450254764:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.134089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706024450254765:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.134366Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.137414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:41.147053Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706024450254768:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:41.227191Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706024450254819:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:42.220994Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706007270382853:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:42.221087Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::Insert-QueryService-UseSink >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2026-01-10T12:10:18.272217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:18.272296Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:18.511118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:19.731042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:19.731114Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:19.813365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:26.105481Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:26.105549Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:26.228609Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:27.447151Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:27.447230Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:27.577579Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:29.019392Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:29.019490Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:29.156741Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:31.182611Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:31.182702Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:31.309806Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:32.819402Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:32.819508Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:33.012627Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:34.715428Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:34.715525Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:34.838136Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:36.874560Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:36.874665Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:37.041807Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:39.051952Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:39.052065Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:39.281170Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:41.935445Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:41.935530Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:42.070279Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:44.643830Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:44.643922Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:44.785957Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:47.680540Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:47.680638Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:47.812952Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:50.203937Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:50.204041Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:50.345787Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:52.870912Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:52.871007Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:52.981752Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:55.420192Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:55.420289Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:55.541279Z node 21 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:10:57.918951Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:10:57.919057Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:10:58.038930Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:00.274721Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:00.274842Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:00.441915Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:03.017817Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:03.017931Z node 24 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:03.134238Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:03.727435Z node 24 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1240: Unexpected config sender died for subscription id=1 2026-01-10T12:11:04.440684Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:04.440787Z node 25 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:04.551357Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:05.910531Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:11:05.910610Z node 26 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:06.023256Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:07.338778Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot 
subscribe to console configs 2026-01-10T12:11:07.338869Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:07.456756Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:14.225611Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:11:14.225724Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:40.242156Z node 27 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2026-01-10T12:15:41.500875Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:15:41.501035Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:41.661473Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:48.778457Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:15:48.778551Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:44.801065Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:44.801170Z node 29 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:44.900562Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> KqpQueryPerf::Replace+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5097, MsgBus: 31232 2026-01-10T12:17:38.522344Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706014116702693:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:38.522384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:38.752192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:38.764246Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:38.764383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:38.767276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:38.839264Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:38.904953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:38.904978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:38.904989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:38.905096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:39.006155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:39.331950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:39.389751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.525486Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:39.606990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.775811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.848265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.768471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706027001606415:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.768626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.769488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706027001606425:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.769684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.052379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.089954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.119146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.150669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.185525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.224396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.275432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.314274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.384616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706031296574592:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.384709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.384785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706031296574597:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.384943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706031296574599:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.385017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.389386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:42.402452Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706031296574601:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:42.468677Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706031296574652:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:43.523168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706014116702693:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.523240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23088, MsgBus: 14607 2026-01-10T12:13:57.679930Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705061692432779:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:57.680395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:58.063030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:58.162475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:58.162598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:58.198725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:58.237170Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:58.248473Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705061692432621:2081] 1768047237633496 != 1768047237633499 2026-01-10T12:13:58.267343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:58.356831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:58.356860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:58.356872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:58.357022Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:58.675609Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:58.985662Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:59.013030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:59.115373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.375809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.600678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.707784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.217322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705083167270981:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.217505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.218036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705083167270991:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.218114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.616293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.658540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.678073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705061692432779:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:02.678123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:02.699029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.775024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.823354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.865134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.912756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.979491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.096570Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087462239156:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.096673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.097812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087462239161:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.097891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087462239162:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.098234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.102806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:03.118807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705087462239165:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:14:03.206657Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705087462239219:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:05.107323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:10.882427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:10.981213Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705632209120427:2081] 1768047370867172 != 1768047370867175 2026-01-10T12:16:10.997377Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:11.002205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:11.002316Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:11.017527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:11.084576Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:11.102188Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:11.102216Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:11.102224Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:11.102328Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:11.767828Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:11.784623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:11.868227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:11.873381Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:12.072301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:12.140971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.586032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705649388991483:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.586136Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.586433Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705649388991493:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.586491Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.648644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.677281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.705505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.737495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.770093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.805587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.843375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.900484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:14.982552Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705649388992364:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.982620Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705649388992369:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.982632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.982784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705649388992371:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.982823Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:14.986496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:14.997361Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705649388992372:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:16:15.072180Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705653683959720:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:15.868522Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705632209120456:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:15.868629Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:16.791477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:25.981027Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:16:25.981066Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:11.755473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:11.755572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:11.755608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:11.755652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:11.755691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:11.755721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:11.755793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:11.755894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:11.756853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:11.757136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:11.832744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:11.832795Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:11.843899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:11.843996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:11.844167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:11.851721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:11.851966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:11.852788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:11.853006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:11.855288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.855463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:11.856674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:11.856741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:11.856816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:11.856865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:11.856917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:11.857191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:11.973429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:11.974910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26-01-10T12:17:45.910373Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.910475Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:45.910556Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:45.910622Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:45.912937Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.913033Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:45.913113Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:45.915206Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.915275Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.915364Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:45.915454Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:45.915700Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:45.917574Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:45.917803Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:45.918290Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:45.918478Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 115964119152 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:45.918565Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:45.918955Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:45.919053Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:45.919398Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:45.919518Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:45.921997Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:45.922077Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:45.922451Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:45.922528Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [27:210:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:17:45.923080Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.923173Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:45.923403Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:45.923482Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:45.923556Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:45.923625Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:45.923692Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:45.923777Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:45.923856Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:45.923914Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:45.924036Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:45.924158Z 
node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:45.924236Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:45.925128Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:45.925331Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:45.925432Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:45.925512Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:45.925601Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:45.925748Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:45.929466Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:45.930244Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:45.931814Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [27:673:2662] Bootstrap 2026-01-10T12:17:45.934202Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [27:673:2662] Become StateWork (SchemeCache [27:678:2667]) 2026-01-10T12:17:45.938472Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:45.939164Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:45.939375Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-10T12:17:45.940201Z node 27 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-10T12:17:45.941668Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [27:673:2662] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:45.945635Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:45.946122Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:45.946836Z node 27 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23124, MsgBus: 15657 2026-01-10T12:17:39.832062Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706017928797645:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:39.832171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:40.087136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:40.104773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:40.104852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:40.175958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706017928797616:2081] 1768047459829433 != 1768047459829436 2026-01-10T12:17:40.186252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:40.188946Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:40.313297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:40.313348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:40.313363Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:40.313477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:40.315886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:40.759283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:40.809587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.846365Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:40.977798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.132324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.194345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.088567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706035108668683:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.088689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.089009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706035108668693:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.089055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.440066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.475038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.510131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.543375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.578488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.616820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.656721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.720230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.790717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706035108669566:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.790770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706035108669571:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.790785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.791005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706035108669574:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.791053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.793666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:43.803209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706035108669573:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:43.897387Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706035108669628:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:44.832362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706017928797645:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:44.832433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TA] $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::Update+QueryService+UseSink >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous >> KqpQueryPerf::RangeLimitRead+QueryService >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:18.103355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:18.103460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:18.103509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:18.103551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:18.103586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:18.103614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:18.103698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:18.103777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:18.104644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:18.104911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:18.167417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:18.167457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:18.180054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:18.180211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:18.180404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:18.188106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:18.188328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:18.189057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:18.189304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:18.191600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.191787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:18.192979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:18.193038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:18.193106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:18.193149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:18.193203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:18.193467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:18.320007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.320808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.320947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:17:18.321413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:18.321754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-10T12:17:46.706367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.706532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.706665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.706765Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.707057Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.707363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.708760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.708870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710225Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710378Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710464Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710691Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710787Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710874Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.710952Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.711019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.711124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:46.711190Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:46.711352Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:46.711407Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:46.711458Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:46.711503Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:46.711550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:17:46.711635Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:3107:4327] message: TxId: 101 2026-01-10T12:17:46.711695Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:46.711769Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:46.711812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:46.713335Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-10T12:17:46.716707Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:46.716772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:3108:4328] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:46.717467Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:46.717785Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 349us result status StatusSuccess 2026-01-10T12:17:46.718555Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 
72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1444, MsgBus: 28148 2026-01-10T12:17:40.855645Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706019994785704:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.855693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:41.107301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:41.127298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:41.127444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:41.220239Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706019994785680:2081] 1768047460853781 != 1768047460853784 2026-01-10T12:17:41.233145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:41.237006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:41.323734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:41.325650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:41.325686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:41.325695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:41.325784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:41.776817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:41.789126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:41.851611Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.868238Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:42.037992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.193318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.262814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.136997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706037174656741:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.137259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.137722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706037174656751:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.137790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.476672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.510211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.543899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.573046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.602166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.634281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.672135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.727487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.821013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706037174657621:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.821084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.821297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706037174657626:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.821340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706037174657627:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.821566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.824952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:44.835389Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706037174657630:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:44.918005Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706037174657683:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:45.856212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706019994785704:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.856298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> GroupWriteTest::ByTableName >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> KqpQueryPerf::Upsert-QueryService-UseSink >> GroupWriteTest::SimpleRdma >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27550, MsgBus: 21593 2026-01-10T12:17:29.904540Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705975209023019:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:29.904610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:30.165278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:30.165405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:30.192855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:30.231464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:30.251587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:30.253027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705975209022990:2081] 1768047449902847 != 1768047449902850 2026-01-10T12:17:30.321070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:30.321100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:17:30.321112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:30.321242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:30.392477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:30.761953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:30.811864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:30.913172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:30.943919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.108136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:31.190073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.087450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705992388894045:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.087587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.087945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705992388894055:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.088026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.372855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.405548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.441554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.490454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.526526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.564229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.597169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.652461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:33.722914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705992388894924:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.723059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.723202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705992388894929:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.723243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705992388894930:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.723290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:33.728163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:33.739277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705992388894933:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:33.806162Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705992388894984:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:34.904676Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705975209023019:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:34.904765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:35.556354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 2026-01-10T12:17:39.233452Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:39.276284Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:39.276374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:39.280609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:39.343835Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:39.343858Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:39.343867Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:39.343955Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:39.387601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:39.836402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:39.841433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:39.854310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.911150Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.077683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.160547Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:40.187473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.496541Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706030652386891:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.496643Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.496896Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706030652386900:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.496931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.580492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.610029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.640529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.673661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.716599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.752535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.790936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.844482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.929647Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706030652387768:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.929770Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.929852Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706030652387773:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.930106Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706030652387775:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.930165Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.932842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:42.943517Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706030652387776:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:43.043154Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706034947355124:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:44.149811Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706017767483168:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:44.149885Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:44.747521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.794397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.832360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink >> GroupWriteTest::WithRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9121, MsgBus: 3239 2026-01-10T12:17:36.258420Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706002875714621:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.258490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:36.312982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
2026-01-10T12:17:36.580050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:36.580215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:36.601243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:36.623203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:36.687129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:36.688101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706002875714596:2081] 1768047456256811 != 1768047456256814 2026-01-10T12:17:36.753799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:36.753825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:36.753839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:36.753949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:36.836397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:37.253789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:37.261781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:37.284289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:37.325112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.561379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.718960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.788462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.625859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015760618355:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.625971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.626309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015760618365:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.626387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.969478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.001655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.039676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.074542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.112130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.155614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.201350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.256292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.357781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706020055586532:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.357902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.358247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706020055586537:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.358358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706020055586538:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.358663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.362337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:40.373972Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706020055586541:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:40.434223Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706020055586592:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:41.261839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706002875714621:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.261919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15423, MsgBus: 27648 2026-01-10T12:17:43.068551Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706032983104789:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.068828Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:43.086972Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:43.133542Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706032983104667:2081] 1768047463062888 != 1768047463062891 2026-01-10T12:17:43.171156Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:43.209921Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:43.210006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:43.238333Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:43.274405Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:43.274429Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:43.274437Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:43.274511Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:43.274937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:43.666967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:43.682676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.748780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.923773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.986721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.161340Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:46.379696Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706045868008417:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.379790Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.380355Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706045868008427:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.380488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.455479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.497867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.541174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.584169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.631554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.667894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.707859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.773226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.883757Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706045868009294:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.883834Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.884404Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706045868009299:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.884460Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706045868009300:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.884765Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.889778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:46.906599Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706045868009303:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:46.991045Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706045868009354:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:48.066970Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706032983104789:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.067071Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25146, MsgBus: 2229 2026-01-10T12:17:43.207224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706034864013955:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.208323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:43.477240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:43.477405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:43.502711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:43.531445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:43.590758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:43.664224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:43.664253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:43.664260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:43.664380Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:43.741780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:44.101908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:44.167556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.241423Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:44.332884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.497045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.568499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.667876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706047748917655:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.667989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.668323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706047748917665:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.668365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:46.972932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.002043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.038789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.072228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.110959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.153806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.198317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.255514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.330837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052043885834:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.330934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.331077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052043885839:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.331130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052043885840:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.331200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.334988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:47.346986Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706052043885843:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:47.407659Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706052043885894:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:48.207448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706034864013955:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.207531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:22.294143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:22.294221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:22.294269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:22.294315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:22.294353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:22.294396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:22.294469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:22.294541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:22.295365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:22.295639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:22.377481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:22.377567Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:17:22.391879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:22.392188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:22.392346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:22.398112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:22.398549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:22.399134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:22.399914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:22.402936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:22.403092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:22.403848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:22.403904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:22.404012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:22.404048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:22.404104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:22.404282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:22.549291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.550908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:22.551647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
tifyTxCompletionResult 2026-01-10T12:17:50.065221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1745:3646] 2026-01-10T12:17:50.065479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-10T12:17:50.180727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-10T12:17:50.181417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-10T12:17:50.181588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-10T12:17:50.181658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.181774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-10T12:17:50.182491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-10T12:17:50.182538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.182592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-10T12:17:50.287091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-10T12:17:50.287188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:50.287321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: 
at schemeshard: 72057594046678944 2026-01-10T12:17:50.287475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409549, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1768060733218985 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-10T12:17:50.287574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409548, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1768060733218985 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-10T12:17:50.288484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:17:50.288631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:17:50.288923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:50.288976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-10T12:17:50.289869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-10T12:17:50.289908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:17:50.293048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:50.293186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:50.293235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-10T16:58:53.218985Z, at schemeshard: 72057594046678944 2026-01-10T12:17:50.293329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-10T12:17:50.293394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:50.293503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-10T12:17:50.293569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-10T16:58:53.218985Z, at schemeshard: 72057594046678944 2026-01-10T12:17:50.293611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-10T12:17:50.315468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at 
tablet 72057594046678944, queue size# 0 2026-01-10T12:17:50.360647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-10T12:17:50.360843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-10T12:17:50.360919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-10T12:17:50.361016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.361161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-10T12:17:50.361349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-10T12:17:50.361401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.361470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-10T12:17:50.395459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:17:50.461360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-10T12:17:50.461518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-10T12:17:50.461579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-10T12:17:50.461656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 
followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.461771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-10T12:17:50.461965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-10T12:17:50.462017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:17:50.462067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16788, MsgBus: 22349 2026-01-10T12:17:37.128114Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706008037675468:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:37.128406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:37.418674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:37.418819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:37.421402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:37.428786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:37.527899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:37.550102Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706008037675336:2081] 1768047457116432 != 1768047457116435 2026-01-10T12:17:37.632653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:37.643807Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:37.643835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:37.643853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:37.644031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:38.098651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:38.131998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:38.161303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.339928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.484271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.555025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.408120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706020922579112:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.408266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.408636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706020922579122:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.408726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.735440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.766948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.805949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.837883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.870956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.912326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.952715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.027349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.128686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706025217547291:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.128793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.129329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706025217547296:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.129422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706025217547297:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.129483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.134589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:41.156161Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706025217547300:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:41.217656Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706025217547353:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:42.127647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706008037675468:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:42.127719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9594, MsgBus: 15177 2026-01-10T12:17:44.006783Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706036462808278:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:44.006841Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:44.023296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:44.097525Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706032167840957:2081] 1768047464005512 != 1768047464005515 2026-01-10T12:17:44.105093Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:44.125509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:44.125599Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:44.134982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:44.194639Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:44.194666Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:44.194674Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:44.194767Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:44.212532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:44.604687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:44.615900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.684918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.835060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.898693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.031075Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:47.281082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706049347712014:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.281171Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.281470Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706049347712024:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.281518Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.352790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.384667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.420016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.460967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.497534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.535884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.577074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.653343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.734880Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706049347712892:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.734966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.735284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706049347712897:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.735329Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706049347712898:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.735548Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.739584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:47.751813Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706049347712901:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:47.837656Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706049347712952:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:49.008180Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706036462808278:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:49.008260Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=168048005.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=168048005.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168048005.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168048005.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148048005.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=168048005.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=168048005.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046805.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148048005.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148048005.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=148046805.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=148046805.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=148046805.000000s;Name=;Codec=}; 2026-01-10T12:16:45.514197Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:45.550920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:45.551175Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:45.558761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:45.559045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:45.559293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:45.559447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:45.559578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:45.559723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:45.559850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:45.559981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:45.560140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:45.560257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:45.560365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:45.560478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:45.560603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:45.594647Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:45.595033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:45.595093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:45.595277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:45.595437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:45.595529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:45.595574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:45.595686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:45.595764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:45.595828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:45.595865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:45.596031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:45.596120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:45.596165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:16:45.596213Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:45.596324Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:45.596383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:45.596437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:45.596475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:45.596525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:45.596564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:45.596597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:45.596642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:45.596680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:45.596711Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:45.596913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:45.596966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:45.597002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:45.597124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:45.597166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:45.597222Z nod ... 
; 2026-01-10T12:17:51.001168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=102; 2026-01-10T12:17:51.001206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9209; 2026-01-10T12:17:51.001250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9343; 2026-01-10T12:17:51.001308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:17:51.001384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=37; 2026-01-10T12:17:51.001416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9943; 2026-01-10T12:17:51.001570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=109; 2026-01-10T12:17:51.001721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2026-01-10T12:17:51.001860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=88; 2026-01-10T12:17:51.001989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=82; 2026-01-10T12:17:51.007602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5542; 2026-01-10T12:17:51.012831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5121; 2026-01-10T12:17:51.012932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:17:51.012985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-10T12:17:51.013027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:17:51.013095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2026-01-10T12:17:51.013134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:17:51.013210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2026-01-10T12:17:51.013249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:17:51.013331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=46; 2026-01-10T12:17:51.013444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=66; 2026-01-10T12:17:51.013857Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=355; 2026-01-10T12:17:51.013904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31686; 2026-01-10T12:17:51.014032Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:17:51.014148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:17:51.014202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:17:51.014268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:17:51.037984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-10T12:17:51.038143Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:51.038234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 
2026-01-10T12:17:51.038299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047155341;tx_id=18446744073709551615;;current_snapshot_ts=1768047406820; 2026-01-10T12:17:51.038343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:51.038390Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:51.038427Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:51.038519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:51.038749Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.028000s; 2026-01-10T12:17:51.052486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:17:51.060524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:17:51.060624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:17:51.060744Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-10T12:17:51.060817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047155341;tx_id=18446744073709551615;;current_snapshot_ts=1768047406820; 2026-01-10T12:17:51.060861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:17:51.060922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:51.060967Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:17:51.061070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:17:51.061597Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.182000s; 2026-01-10T12:17:51.061638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> HashShuffle::BackPressureInMemoryLoad [GOOD] >> HashShuffle::BackPressureWithSpilling [GOOD] >> HashShuffle::BackPressureWithSpillingLoad >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17080, MsgBus: 25997 2026-01-10T12:17:43.951968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706034257109120:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.952061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:44.196414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:44.200715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:44.200815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:44.268585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:44.271771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:44.286133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7593706034257108995:2081] 1768047463946192 != 1768047463946195 2026-01-10T12:17:44.344039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:44.344063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:44.344071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:44.344181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:44.434645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:44.804637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:44.858091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.965123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:45.022806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.199245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.291450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.259601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706051436980066:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.259739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.260123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706051436980076:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.260204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.594459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.635861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.678184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.716719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.749117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.788789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.847728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.893040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.984227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706051436980951:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.984382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.984687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706051436980956:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.984754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706051436980957:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.984914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.989935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:48.007739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706051436980960:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:48.079177Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706055731948307:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:48.952178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706034257109120:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.952244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:03.539583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:03.539673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:03.539712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:03.539765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:03.539801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:03.539827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:03.539900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:03.539968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:03.540724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:03.540947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:03.628291Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:03.628365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:03.644317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:03.644465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:03.644682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:03.652690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:03.652886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:03.653460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:03.653647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:03.655659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:03.655832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:03.656970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:03.657034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:03.657094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:03.657129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:03.657163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:03.657400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:03.773004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.773832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:17:03.773932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.773998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:03.774625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-10T12:17:51.411591Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.411732Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:17:51.411833Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:17:51.411919Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:17:51.415067Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.415185Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:17:51.415278Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:17:51.417666Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.417757Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.417872Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:51.418009Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:17:51.418291Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:17:51.420956Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:17:51.421222Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:17:51.421829Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:51.422063Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 158913792112 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:51.422183Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:51.422622Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:17:51.422739Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:17:51.423145Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:51.423280Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:17:51.426000Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:51.426097Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:51.426494Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:51.426598Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:17:51.427173Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.427290Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:17:51.427548Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:51.427637Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:51.427742Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:17:51.427827Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:51.427911Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:17:51.428008Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:17:51.428175Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:17:51.428254Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:17:51.428387Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:17:51.428476Z 
node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:17:51.428550Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:17:51.429649Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:51.429844Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:17:51.429949Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:17:51.430032Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:17:51.430120Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:17:51.430278Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:17:51.435257Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:17:51.436125Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:17:51.437718Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [37:671:2661] Bootstrap 2026-01-10T12:17:51.440285Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [37:671:2661] Become StateWork (SchemeCache [37:676:2666]) 2026-01-10T12:17:51.444514Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:17:51.445189Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.445419Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-10T12:17:51.446359Z node 37 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-10T12:17:51.447777Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [37:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:17:51.451764Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:17:51.452242Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-10T12:17:51.452928Z node 37 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:17:12.650120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:12.650197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:12.650227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:12.650260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:12.650288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:12.650309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:12.650360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:12.650418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-10T12:17:12.651057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:12.651254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:12.719279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:17:12.719339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:12.730097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:12.730202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:12.730341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:12.737992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:12.738189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:12.738754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:12.738966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:12.740683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:12.740855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:12.741657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:12.741701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:12.741754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:12.741787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:12.741816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:12.742006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:12.855559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.856939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:12.857584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:51.910831Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:17:51.910865Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:17:51.910904Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:17:51.910944Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:17:51.911021Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:17:51.912602Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1223 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:51.912649Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:51.912785Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1223 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:17:51.912907Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1223 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:17:51.914296Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 120259086987 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:51.914342Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:17:51.914456Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 120259086987 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:51.914514Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:17:51.914605Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 120259086987 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:17:51.914669Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:51.914709Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.914750Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:17:51.914792Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:17:51.915762Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:51.917759Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:17:51.917885Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.918000Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.918125Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:17:51.918170Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:17:51.918274Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:51.918312Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:51.918355Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:17:51.918389Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:51.918424Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is 
published: true 2026-01-10T12:17:51.918490Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:739:2717] message: TxId: 101 2026-01-10T12:17:51.918545Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:17:51.918592Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:17:51.918626Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:17:51.918774Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:17:51.920680Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:17:51.920731Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:740:2718] TestWaitNotification: OK eventTxId 101 2026-01-10T12:17:51.921208Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:17:51.921430Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 268us result status StatusSuccess 2026-01-10T12:17:51.922053Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2867, MsgBus: 25500 2026-01-10T12:17:45.162286Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706043215085777:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.162328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:45.430366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:45.455438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:45.455569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:45.507363Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706043215085750:2081] 1768047465160169 != 1768047465160172 2026-01-10T12:17:45.524480Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:45.533582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:45.596669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:45.670134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:45.670170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:17:45.670179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:45.670264Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:46.165824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:46.175891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:46.239354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.452362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.635445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.719603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.648646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706056099989511:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.648772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.649110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706056099989521:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.649170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.974653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.022439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.061440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.103970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.145507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.188842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.234355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.284375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.366016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060394957689:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.366117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.366481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060394957694:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.366522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060394957695:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.366635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.370606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:49.388545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706060394957698:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:49.455459Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706060394957749:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:50.162306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706043215085777:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.162380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher >> KqpQueryPerf::RangeRead-QueryService [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> GroupWriteTest::Simple |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20791, MsgBus: 65119 2026-01-10T12:17:38.935005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706011223862218:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:38.935853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:39.166140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:39.166274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:39.208972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:39.211596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:39.212291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:39.214054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706011223862031:2081] 1768047458887170 != 1768047458887173 2026-01-10T12:17:39.361491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:39.361522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-01-10T12:17:39.361530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:39.361651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:39.435631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:39.750521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:39.808869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.917639Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:39.934803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.085654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.166262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.166202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706028403733102:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.166347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.166736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706028403733112:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.166769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.517977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.551588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.580650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.609329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.641291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.689366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.765218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.815005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.898893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706028403733984:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.898972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.899106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706028403733990:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.899106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706028403733989:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.899132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:42.903107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:42.921188Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706028403733993:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:42.979148Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706028403734046:3782] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:43.913662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706011223862218:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.913741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11536, MsgBus: 23458 2026-01-10T12:17:45.599372Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706043759215391:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.604350Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:45.636196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:45.730194Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:45.732276Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706043759215294:2081] 1768047465589853 != 1768047465589856 2026-01-10T12:17:45.754523Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:45.754610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:45.760529Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:45.848510Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:45.848533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:45.848540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:45.848619Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:45.934239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:46.367983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:46.377585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:46.394121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.484936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.608009Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:46.627853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.700254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.281792Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706060939086345:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.281897Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.282430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706060939086355:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.282475Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.397186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.447881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.480930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.516324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.556210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.596322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.644460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.702141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.817062Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706060939087227:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.817164Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.817491Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706060939087232:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.817545Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706060939087233:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.817810Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.821670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:49.835457Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706060939087236:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:49.935878Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706060939087287:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:50.593322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706043759215391:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.593396Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24510, MsgBus: 28531 2026-01-10T12:17:46.676819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706044579926306:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:46.677084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:46.984160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:46.986337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:46.986459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:47.015168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:47.109987Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:47.199557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:47.199582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:47.199592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:47.199710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:47.212228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:47.636438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:47.644795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:47.688213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:47.731045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.905603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.069599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.144186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.183331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061759797227:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.183479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.183863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061759797237:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.183938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.536721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.565784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.595487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.628023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.668993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.742752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.787080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.837630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.924371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061759798113:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.924448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061759798118:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.924449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.924767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061759798120:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.924818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.928189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:50.939653Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706061759798121:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:51.026344Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706066054765469:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:51.674012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706044579926306:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:51.674082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 29182, MsgBus: 17802 2026-01-10T12:13:59.392447Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705069349054590:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:59.394032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:59.810154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:59.812573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:59.826647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:59.862778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:59.960326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:00.115467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:14:00.153098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:14:00.153126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:14:00.153135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:14:00.153223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:14:00.396194Z 
node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:14:00.805701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:00.829666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:14:00.885449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:01.071654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:01.259852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:01.349106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.013543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705090823892882:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.013687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.014223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705090823892892:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.014294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.382770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705069349054590:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:04.382861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:04.421175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.501990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.539882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.580046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.616072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.663099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.710127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.783135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:04.907659Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705090823893771:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.907756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.910661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705090823893777:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.910744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705090823893776:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.910896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:04.915931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:04.930659Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705090823893780:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:14:04.992258Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705090823893833:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:06.737437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:46.901211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:46.985302Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:46.987499Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705789274100034:2081] 1768047406886852 != 1768047406886855 2026-01-10T12:16:47.001656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:47.001780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:47.035260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:47.088788Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:47.088818Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:47.088827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:47.088927Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:47.100859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:47.521326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:47.535479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:47.588284Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:47.738394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:47.839181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:47.965830Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:50.307898Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705806453971083:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.307973Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.308163Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705806453971092:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.308199Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.384356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.413881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.447153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.476216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.506352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.540018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.574294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.621142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:50.716667Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705806453971968:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.716730Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705806453971973:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.716759Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.716913Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705806453971976:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.716953Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:50.720016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:50.731619Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705806453971975:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:16:50.830037Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705806453972028:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:51.888112Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705789274100062:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:51.888198Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:52.559634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:01.959080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:17:01.959108Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62556, MsgBus: 15586 2026-01-10T12:17:47.573110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706052564711226:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:47.573224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:47.857040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:47.861686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:47.861822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:47.890037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:47.972217Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7593706052564711204:2081] 1768047467571484 != 1768047467571487 2026-01-10T12:17:47.979363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:48.048134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:48.074417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:48.074443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:48.074454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:48.074557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:48.508730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:48.575496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.583720Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:48.711832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.894060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.963802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.910592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065449614978:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.910768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.911708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065449614988:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.911793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.220702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.262531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.298943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.329360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.359092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.403441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.434105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.508450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.597842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706069744583154:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.597907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.598044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706069744583159:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.598086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706069744583160:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.598116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.601023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:51.610690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706069744583163:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:51.681820Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706069744583214:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:52.572708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706052564711226:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:52.572783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14767, MsgBus: 19783 2026-01-10T12:17:35.503750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705998556700333:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:35.505122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:35.748184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:35.766657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:35.766784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:35.823602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:35.890168Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:35.892182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705998556700279:2081] 1768047455497467 != 1768047455497470 2026-01-10T12:17:35.977149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:36.000383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:36.000424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:36.000432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:36.000514Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:36.521197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:36.528324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:36.588351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.704893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.856735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:36.916997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:38.926536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706011441604044:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.926705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.927163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706011441604054:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:38.927245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.253664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.290610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.324434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.360384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.397483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.436347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.517007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.565933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.643922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015736572226:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.644040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.644129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015736572231:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.644288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015736572233:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.644336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.647462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:39.656419Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706015736572234:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:39.714404Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706015736572286:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:40.500383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705998556700333:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.500451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:41.458480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ification cookie mismatch for subscription [2:7593706038218684189:2081] 1768047464651562 != 1768047464651565 2026-01-10T12:17:44.827752Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:44.827839Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:44.830130Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:44.888758Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:44.892807Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:44.892836Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:44.892848Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:44.892939Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:45.395089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:45.412251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:45.427881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.523834Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.666511Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:45.748009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.817757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.372602Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706055398555243:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.372686Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.372943Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706055398555253:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.372982Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.457442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.499099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.535631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.567137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.623216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.666568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.703397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.761622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.871417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706055398556128:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.871523Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.872219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706055398556134:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.872239Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706055398556133:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.872376Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:48.876809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:48.888408Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706055398556137:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:48.946638Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706055398556188:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:49.656497Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706038218684229:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:49.656572Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:50.754513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.806728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.853463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] Test command err: Blocked 254975 time(s) emptyPops 0 Blocked 2895 time(s) emptyPops 0 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TKqpScheduler::SingleDatabasePoolQueryStructure [GOOD] >> TKqpScheduler::WeightedDatabase [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedDatabase [GOOD] |90.7%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DemandIsCutOffByLimit [GOOD] >> TKqpScheduler::AddUpdateQueries [GOOD] >> TKqpScheduler::DeleteQueries [GOOD] |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TKqpScheduler::WeightedPools [GOOD] >> TKqpScheduler::WeightedQueries [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanUpdatedRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1172, MsgBus: 11024 2026-01-10T12:17:48.562918Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706055664819499:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.564277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:48.596701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:48.890200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:48.890319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:48.951375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:48.965139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:48.986937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706055664819331:2081] 1768047468549970 != 1768047468549973 2026-01-10T12:17:49.005566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:49.106483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:49.106515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:49.106529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:49.106667Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:49.225359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:49.569579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:49.572343Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:49.659663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.836062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.016384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.090535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.034758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072844690385:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.034886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.035264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072844690395:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.035566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.361408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.394155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.422099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.451632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.484445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.520874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.571402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.623192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.704144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072844691269:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.704242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.704534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072844691274:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.704590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072844691275:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.704661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.708748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:52.723994Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706072844691278:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:52.798404Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706072844691329:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:53.561570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706055664819499:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:53.561640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25947, MsgBus: 28910 2026-01-10T12:17:48.664577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706055295487531:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.664958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:48.961693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:48.974216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:48.974358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:49.005311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:49.091476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706055295487433:2081] 1768047468654473 != 1768047468654476 2026-01-10T12:17:49.106136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:49.175289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:49.175314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:49.175323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:49.175425Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:49.194409Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:49.648119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:49.659988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:49.684408Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:49.739348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.885848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.058349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.128781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.032357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072475358498:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.032452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.032750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072475358508:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.032798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.361664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.394734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.427300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.460964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.494418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.536124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.581612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.661681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.740374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072475359379:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.740480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.742280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072475359384:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.742358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706072475359385:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.742399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.745838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:52.755733Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706072475359388:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:52.841020Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706072475359439:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:53.662113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706055295487531:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:53.662188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DeleteQueries [GOOD] >> TKqpScheduler::ZeroQueries [GOOD] >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService >> TKqpScanData::ArrowToUnboxedValueConverter |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5818, MsgBus: 3978 2026-01-10T12:17:36.077196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706001489844583:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.077228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:36.335144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:36.342888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:36.342997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:36.381836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:36.448138Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706001489844559:2081] 1768047456070937 != 1768047456070940 2026-01-10T12:17:36.448953Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:36.610219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:36.632623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:36.632651Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:36.632657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:36.632741Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:37.038788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:37.100191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:37.114220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.262863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.468626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:37.557733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.500043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706014374748320:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.500167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.500461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706014374748330:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.500542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.838719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.874151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.910569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.946772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:39.981002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.019860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.085486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.138679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.242156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706018669716494:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.242279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.243661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706018669716499:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.243700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706018669716500:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.243932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.247445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:40.259097Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706018669716503:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:40.334448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706018669716554:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:41.079144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706001489844583:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.079218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:42.142227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... ification cookie mismatch for subscription [2:7593706040174086484:2081] 1768047465944571 != 1768047465944574 2026-01-10T12:17:46.115206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:46.115297Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:46.145006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:46.285976Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:46.312638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:46.312676Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:46.312686Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:46.312782Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:46.770080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:46.777350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:46.788117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.843756Z node 
2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:46.976688Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:47.058078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.140635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.836956Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706057353957539:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.837082Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.837716Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706057353957549:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.837811Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.902561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.934096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.978008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.010230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.050246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.095719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.134580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.193247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.292563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706061648925720:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.292643Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.293474Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706061648925725:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.293644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706061648925726:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.293736Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.298332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:50.312558Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706061648925729:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:50.368722Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706061648925780:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:50.957461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706040174086553:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.957594Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:52.162800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.206953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.278640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TKqpScanFetcher::ScanDelayedRetry >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> TKqpScheduler::AddUpdatePools [GOOD] >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedQueries [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroLimits [GOOD] >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::LeftFairShareIsDistributed [GOOD] >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:50 :META:Reads { ShardId: 1001001 KeyRanges { } } 2026-01-10T12:17:57.418958Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-10T12:17:57.419048Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-10T12:17:57.429662Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-10T12:17:57.429779Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16127, MsgBus: 25731 2026-01-10T12:17:50.443088Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706062790708133:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.448112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:50.752411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:17:50.773382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:50.773513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:50.830878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:50.878082Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:50.881735Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706062790708101:2081] 1768047470440686 != 1768047470440689 2026-01-10T12:17:50.968967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:50.997001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:50.997024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:50.997030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:50.997130Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:51.463241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:51.563607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:51.638034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.815504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.978167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.044763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.894183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706075675611862:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.894335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.895255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706075675611872:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.895319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.225796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.263806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.302532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.343413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.388816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.428732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.469007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.538101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.612705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079970580039:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.612778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.612816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079970580044:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.612983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079970580046:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.613019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.616409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:54.626453Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706079970580048:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:54.730054Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706079970580099:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:55.442995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706062790708133:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:55.443059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] |90.8%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> GroupWriteTest::WithRead [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29363, MsgBus: 3924 2026-01-10T12:17:40.086208Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706021343333334:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.086285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:40.348225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:40.358564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:40.358685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:17:40.461128Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706021343333307:2081] 1768047460084007 != 1768047460084010 2026-01-10T12:17:40.471118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:40.473842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:40.577292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:40.577320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:40.577337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:40.577469Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:40.590844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:40.986237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:40.995417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:41.055049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.094322Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:41.186072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.351447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.417528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.342337Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706034228237080:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.342506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.342943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706034228237090:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.342993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:43.696323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.729097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.762478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.791917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.820476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.855137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.887733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.949536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.032409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038523205256:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.032549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.033060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038523205262:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.033076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038523205261:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.033115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.037815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:44.054440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706038523205265:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:44.145372Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706038523205318:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:45.088193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706021343333334:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.088263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:46.081444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 2026-01-10T12:17:49.981356Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:49.998398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:49.998483Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:50.027843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:50.171114Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:50.171150Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:50.171160Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:50.171253Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:50.185498Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:50.669771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:50.680736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:50.695172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.767702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.898325Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:50.975236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.054373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.474112Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706075350189861:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.474228Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.474542Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706075350189871:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.474618Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.545287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.581799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.612840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.640472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.670885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.708103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.747048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.798350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.886207Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706075350190739:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.886294Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706075350190744:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.886299Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.886637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706075350190746:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.886692Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.891588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:53.907322Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706075350190747:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:53.985837Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706075350190801:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:54.868427Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706058170318848:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:54.868508Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:55.612922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.658710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.697144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7563, MsgBus: 20314 2026-01-10T12:17:40.861235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706021251996964:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:40.861303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:41.194171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:41.210601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:41.210727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:41.236275Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706021251996922:2081] 1768047460859460 != 1768047460859463 2026-01-10T12:17:41.242171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:41.251172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:41.368121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:41.368175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:41.368189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:41.368311Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:41.405940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:41.775620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:41.791951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:41.857296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.869380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:42.004028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.172453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.246775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.096887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7593706038431867981:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.096978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.097199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038431867991:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.097254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.485042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.516206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.549805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.582121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.628277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.674098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.716930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.788258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.888396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038431868866:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.888470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.888750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038431868871:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.888782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038431868872:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.888816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.892268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:44.906402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706038431868875:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:44.964507Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706038431868926:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:45.861492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706021251996964:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.861560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:46.882293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 2026-01-10T12:17:50.457701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:50.457789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:50.460235Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:50.477176Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:50.608878Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:50.608905Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:50.608913Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:50.609009Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:50.640174Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:51.123341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:51.129824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:51.142707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.211029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.343923Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:51.387882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.473731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.971558Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706077300843698:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.971671Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.971991Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706077300843708:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.972056Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.046608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.081232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.117382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.159986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.204216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.249527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.295889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.356429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.447692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706081595811878:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.447792Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.447906Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706081595811883:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.448210Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706081595811885:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.448268Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.451906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:54.462930Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706081595811887:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:54.543854Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706081595811938:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:55.335993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706064415939972:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:55.336066Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:56.211801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.254245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.315357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6923, MsgBus: 27946 2026-01-10T12:17:45.733758Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706043278250819:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:45.738565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:46.007022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:46.014066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:46.014211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:46.073819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-10T12:17:46.121619Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:46.259191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:46.305845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:46.305866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:46.305871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:46.305938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:46.753099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:46.816595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:46.822428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:46.879349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.028055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.204001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.280503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.319982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060458121851:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.320141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.320650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060458121861:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.320700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.587010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.634060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.670676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.709407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.745234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.787528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.831523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.879737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.967378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060458122731:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.967505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.967660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060458122736:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.967935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706060458122738:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.968007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:49.972377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:49.989980Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706060458122739:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:50.069961Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706064753090087:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:50.732200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706043278250819:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.732280Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13438, MsgBus: 5784 2026-01-10T12:17:53.244400Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706075549765386:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:53.244457Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:53.259814Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:53.347318Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:53.347853Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706075549765357:2081] 1768047473243496 != 1768047473243499 2026-01-10T12:17:53.386934Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:53.387023Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:53.390198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:53.444645Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:53.444672Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:53.444682Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:53.444759Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:53.447410Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:53.837912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:53.847337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:53.860452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.917602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.109864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.181863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.354341Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:56.492224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088434669112:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.492338Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.492695Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088434669122:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.492746Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.559382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.591300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.618799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.647465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.676458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.710132Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.750003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.801693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.879215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088434669989:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.879315Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.879322Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088434669994:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.879600Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088434669996:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.879666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.883363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:56.895039Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706088434669997:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:56.995082Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706088434670049:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:58.248376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706075549765386:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:58.249262Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 15409458738442175901 2026-01-10T12:17:52.725724Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:17:52.750649Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:17:52.750717Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:17:52.753453Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:17:52.768953Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:17:52.771558Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:18:00.200671Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-10T12:18:00.200799Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:18:00.255412Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |90.8%| [TM] 
{BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11711, MsgBus: 18745 2026-01-10T12:17:46.968892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706044437035014:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:46.969093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:47.187031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:47.207718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:47.207859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:47.289329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:47.289777Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706044437034793:2081] 1768047466929706 != 1768047466929709 2026-01-10T12:17:47.292050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:47.393206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:47.414362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:47.414388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:47.414397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:47.414503Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:47.808146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:47.890228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.967451Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:48.043073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.212847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:48.298209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.349823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061616905860:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.349941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.351955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706061616905870:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.352016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:50.690277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.725725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.767177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.806494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.844765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.899200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.946485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.999488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.113956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065911874041:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.114061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.114398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065911874046:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.114435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065911874047:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.114714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.119255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:51.132669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706065911874050:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:51.232178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706065911874101:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:51.968432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706044437035014:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:51.968504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27187, MsgBus: 23065 2026-01-10T12:17:54.237219Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706079413801084:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:54.237654Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:54.278722Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:54.377732Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:54.379338Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706079413801030:2081] 1768047474234305 != 1768047474234308 2026-01-10T12:17:54.393123Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:54.393213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:54.413596Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:54.504046Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:54.504095Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:54.504103Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:54.504198Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:54.536190Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:54.905618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:54.922806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.989359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.097873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.162964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.289776Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:57.704285Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706092298704787:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.704397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.704635Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706092298704797:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.704696Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.803071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.841399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.873061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.904792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.946899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.986064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.023341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.075559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.173290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706096593672960:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:58.173416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:58.173419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706096593672965:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:58.175925Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706096593672967:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:58.176012Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:58.177990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:58.194029Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706096593672968:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:58.274009Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706096593673020:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:59.236228Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706079413801084:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:59.236313Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63560, MsgBus: 30594 2026-01-10T12:17:41.862480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706024307297979:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.862562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:41.896120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:42.163178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:42.163318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:42.166326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:42.211242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:42.274763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:42.276295Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706024307297950:2081] 1768047461861005 != 1768047461861008 2026-01-10T12:17:42.353241Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:42.353277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:42.353289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:42.353386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:42.387332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:42.799389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:42.867179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.869930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:43.005336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.165335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:43.239743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.165903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706041487169009:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.166050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.166601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706041487169019:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.166660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.515115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.556399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.590441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.620338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.664901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.723094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.763940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.830198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.944250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706041487169890:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.944349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.944748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706041487169895:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.944788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706041487169896:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.944817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.949289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:45.966620Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706041487169899:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:46.042968Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706045782137246:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:46.862812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706024307297979:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:46.862911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:47.953759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... ification cookie mismatch for subscription [2:7593706066403326000:2081] 1768047471697606 != 1768047471697609 2026-01-10T12:17:51.863975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:51.864222Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:51.881078Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:51.960790Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:51.960812Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:51.960818Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:51.960911Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:52.039413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:52.385658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:52.390974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:52.406105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.486660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.672376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.745395Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:52.791448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.313098Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706083583197054:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.313207Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.314100Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706083583197063:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.314171Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.387967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.421790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.456523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.489520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.523631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.568711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.611759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.669069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.759395Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706083583197933:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.759521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.759636Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706083583197938:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.759883Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706083583197940:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.759932Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.763489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:55.775069Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706083583197941:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:55.867162Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706083583197995:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:56.699016Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706066403326028:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:56.699087Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:57.384362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.424347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.467910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3297, MsgBus: 28980 2026-01-10T12:17:41.690701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706025829947838:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.691003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:41.909340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:41.913511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:41.913615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:41.919252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:41.986255Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:42.075928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:42.078028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:42.078060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:42.078068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:42.078145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:42.503345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:42.548799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.670927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.776797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:42.828908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:42.898346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.905438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038714851554:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.905599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.906196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706038714851564:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:44.906293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.247376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.282312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.315928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.353017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.394714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.439302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.487017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.537630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.618525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706043009819730:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.618583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.618824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706043009819735:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.618861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706043009819736:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.618996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:45.623303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:45.644326Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706043009819739:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:45.740070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706043009819790:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:46.691605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706025829947838:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:46.693026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:47.770574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.807678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... 2026-01-10T12:17:51.767919Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:51.774860Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:51.775093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:51.798873Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:51.886686Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:51.886712Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:51.886720Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:51.886796Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:51.911476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:52.305888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:52.311178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:52.323534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.384777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.576698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.659481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.670449Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:55.209053Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706084294849876:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.209187Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.209631Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706084294849886:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.209706Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.282570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.319136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.351689Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.384785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.414580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.450430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.520743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.574326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:55.667024Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706084294850758:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.667126Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.667285Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706084294850763:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.667334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706084294850764:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.667379Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:55.671136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:55.682488Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706084294850767:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:55.783981Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706084294850818:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:56.664737Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706067114978865:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:56.664820Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:57.512544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.565256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.605815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> YdbProxy::MakeDirectory >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> YdbProxy::RemoveDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> 
KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6603, MsgBus: 61173 2026-01-10T12:17:43.684030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706034938126254:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:43.684146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:43.717833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:43.970057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:43.970181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:43.974057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:44.021664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:44.059733Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:44.106639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:44.106667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:44.106681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:44.106790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:44.228174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:44.592151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:44.654919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.703856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:44.819226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:44.988817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:45.064801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.198558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052117997268:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.198741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.199131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052117997278:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.199194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.506945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.540012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.579145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.608044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.638973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.706247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.751369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.799497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:47.880909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052117998156:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.880993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.884170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052117998161:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.884265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.884431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706052117998163:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:47.888330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:47.903063Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706052117998165:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:47.970560Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706052117998216:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:48.688432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706034938126254:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.688519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:49.797998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshar ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:53.554381Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:53.594141Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:53.594249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:53.597484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:53.685610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:53.685644Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:53.685677Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:53.685785Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:53.688214Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:54.213371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:54.227396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.290763Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.463890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.509730Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:54.537557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.979914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088077384557:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.980002Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.980248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706088077384566:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:56.980284Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.050780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.085115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.121233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.156528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.188266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.234141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.270702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.320369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.420750Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706092372352732:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.420832Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.421042Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706092372352738:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.421095Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.421125Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706092372352737:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:57.424625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:57.434367Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706092372352741:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:57.515734Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706092372352794:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:58.467838Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706075192480841:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:58.467914Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:59.248574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.286974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.326690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> YdbProxy::DropTable >> YdbProxy::CreateTable >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> YdbProxy::ListDirectory >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> KqpYql::InsertCV+useSink >> KqpYql::JsonNumberPrecision >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8674, MsgBus: 5312 2026-01-10T12:17:48.421820Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706052913519090:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.421862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:48.455509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:48.745104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:48.745219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:48.798560Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:48.810547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:48.838780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:48.992156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:49.000777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:49.000806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:49.000815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:49.000913Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:49.421903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:49.464450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:49.474009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:49.518366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.675687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.820735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.892204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.850155Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065798422789:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.850269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.850642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706065798422799:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.850706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.159753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.194181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.233142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.273784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.309876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.348707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.394959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.462116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.542739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070093390967:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.542823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.543278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070093390972:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.543338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070093390973:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.543341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.547169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:52.560182Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706070093390976:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:17:52.618937Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706070093391027:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:53.422877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706052913519090:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:53.422961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20790, MsgBus: 26931 2026-01-10T12:17:55.838048Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706086639236075:2135];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:55.838166Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:55.857888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:55.930794Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:55.936981Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706086639235976:2081] 1768047475834793 != 1768047475834796 2026-01-10T12:17:55.938891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:55.938959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:55.962250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:56.019548Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:56.019588Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:56.019596Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:56.019677Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:56.020678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:56.418731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:56.437087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.505380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.683285Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.750848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:56.875790Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:59.192498Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706103819107027:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.192670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.192904Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706103819107036:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.192942Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.268449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.305773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.336761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.391034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.427180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.464018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.499530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.544393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:59.625853Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706103819107905:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.625950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.625967Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706103819107910:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.626165Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706103819107912:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.626224Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:59.629388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:59.640288Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706103819107913:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:59.702537Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706103819107965:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:00.838066Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706086639236075:2135];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:00.838142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> YdbProxy::CopyTable >> KqpPragma::Auth >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] >> KqpScripting::ScanQueryInvalid >> KqpScripting::ScriptExplainCreatedTable >> KqpYql::EvaluateExpr3 >> KqpYql::BinaryJsonOffsetNormal >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpYql::RefSelect >> KqpYql::InsertCV-useSink >> KqpScripting::UnsafeTimestampCast >> KqpYql::UuidPrimaryKeyBulkUpsert >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpYql::DdlDmlMix >> KqpScripting::SelectNullType >> KqpYql::UuidPrimaryKeyDisabled >> KqpYql::FlexibleTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 22860, MsgBus: 25588 2026-01-10T12:17:49.824743Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706058212429035:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:49.824791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:50.108207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:50.109686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:50.109788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:50.166132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:50.204531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:50.312285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:50.312307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:50.312320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:50.312426Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:50.409029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:50.761656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:50.772857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:50.839523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:50.840493Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:50.969345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.137535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.217418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.341151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706075392300042:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.341284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.341842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706075392300052:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.341940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.708098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.740383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.784982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.822641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.856699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.908897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.950085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.997780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.091815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079687268222:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.091934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.092173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079687268227:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.092231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706079687268228:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.092268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.096925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:54.110631Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706079687268231:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:54.199441Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706079687268282:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:54.825169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706058212429035:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:54.825241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16709, MsgBus: 19107 2026-01-10T12:17:57.320031Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706094910300300:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:57.321042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:57.343748Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:57.414389Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:57.416479Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706094910300191:2081] 1768047477317220 != 1768047477317223 2026-01-10T12:17:57.466719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:57.466810Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:57.473846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:57.515966Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:57.515996Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:57.516005Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:57.516097Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:57.569952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:57.903457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:17:57.923598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:57.990025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.164435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.239749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.361314Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:00.800973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706107795203944:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.801072Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.801309Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706107795203953:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.801342Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.883705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.919350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.988432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.020860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.056998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.099024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.137773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.218250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.304197Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112090172122:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.304279Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.304433Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112090172127:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.304482Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112090172128:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.304543Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.308225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:01.319129Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706112090172131:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:01.411171Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706112090172184:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:02.319298Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706094910300300:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:02.319386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 1741, MsgBus: 29071 2026-01-10T12:13:57.993381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705062017315603:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:13:57.993443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:13:58.251492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:13:58.262968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:13:58.263100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:13:58.335069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:13:58.389621Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:13:58.390833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705062017315576:2081] 1768047237992027 != 1768047237992030 2026-01-10T12:13:58.436883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:13:58.508296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:13:58.508322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:13:58.508328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:13:58.508416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:13:59.022873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:13:59.139701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:13:59.150776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:13:59.261034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.481960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.674932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:13:59.768254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.186926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705083492153951:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.187113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.187897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705083492153961:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.188015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:02.598669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.640942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.677941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.717055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.761526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.811118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.872699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:02.958455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:14:03.006824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705062017315603:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:14:03.016607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:14:03.077151Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087787122134:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.077241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.077580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087787122139:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.077631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705087787122140:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.077657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:14:03.082249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:14:03.099787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705087787122143:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:14:03.202030Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705087787122195:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:14:05.175579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 01-10T12:16:24.373941Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:16:24.387043Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:16:24.491564Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:16:24.492878Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593705694829683005:2081] 1768047384372911 != 1768047384372914 2026-01-10T12:16:24.506737Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:16:24.506852Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:16:24.525191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:16:24.567671Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:16:24.567716Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:16:24.567725Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:16:24.567821Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:16:24.679171Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:16:24.972961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:16:24.988551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:25.059619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:25.268324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:25.348125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:25.499925Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:16:28.015917Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705712009554067:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.016011Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.016308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705712009554076:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.016376Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.074812Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.102676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.131560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.185715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.217299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.248872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.282755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.334504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:28.415928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705712009554947:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.416008Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705712009554952:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.416012Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.416217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593705712009554954:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.416267Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:16:28.419620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:16:28.430946Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593705712009554955:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:16:28.499165Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593705712009555007:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:16:29.374067Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593705694829683033:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:16:29.374143Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:16:30.088539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:16:39.451163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:16:39.451185Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17826, MsgBus: 4388 2026-01-10T12:17:50.172147Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706063656461880:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:50.172318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:50.492343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:50.494252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:50.494374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:50.509321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:50.674584Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:50.675387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706063656461838:2081] 1768047470170694 != 1768047470170697 2026-01-10T12:17:50.772978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:50.772998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:50.773006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:50.773078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:50.783343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:51.191650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:51.348377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:51.405896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.575637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.766779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.845085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:53.661121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706076541365601:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.661244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.661628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706076541365611:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.661691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:53.962661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.002287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.036163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.071826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.109925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.191441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.239078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.290553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:54.388324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706080836333787:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.388414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.388743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706080836333792:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.388748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706080836333793:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.388794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:54.393004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:54.405736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706080836333796:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:54.494031Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706080836333847:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:55.172385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706063656461880:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:55.173952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1839, MsgBus: 20081 2026-01-10T12:17:57.911561Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706092683884165:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:57.911644Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:57.972013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:58.014150Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:58.027481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:58.027547Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:58.050491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:58.131660Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:58.135718Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:58.135740Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:58.135747Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:58.135846Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:58.553858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:58.558735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-10T12:17:58.567904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.634367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.836338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.897007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.921906Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:01.220219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706109863755183:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.220326Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.221229Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706109863755193:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.221337Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.284401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.316820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.348039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.377402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.409649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.447784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.518247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.565332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.640218Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706109863756061:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.640283Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.640335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706109863756066:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.640446Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706109863756068:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.640499Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.643837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:01.655467Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706109863756070:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:01.749974Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706109863756121:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:02.911799Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706092683884165:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:02.911874Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] |90.9%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced |90.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:35.373609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:35.373712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:35.373754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:35.373795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:35.373872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:35.373926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:35.374000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:35.374110Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:35.375031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:35.375348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:35.684756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:35.684828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:35.685723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:35.727466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:35.727562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:35.727740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:35.762962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:35.763302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:35.764097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:35.764708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:35.770346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:35.770546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:35.771789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:35.771852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:35.771954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:35.771996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:35.772062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:35.772289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:35.795881Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-10T12:14:36.078153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:36.078390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.078591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:36.078635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:36.078847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:36.078940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:36.096933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:36.097186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:36.097451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.097525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:36.097580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:36.097619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:36.108981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.109081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:36.109122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:36.116876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.116954Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:36.117008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.117084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:36.124779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:36.136776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:36.137032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:36.138096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:36.138270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:36.138322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.138613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:36.138670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:36.138865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:36.138956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:36.149143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:18:04.235797Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:18:04.235870Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:18:04.235917Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-10T12:18:04.235964Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 3 2026-01-10T12:18:04.236851Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 190:2 from tablet: 72057594046678944 to tablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 275382275 2026-01-10T12:18:04.245245Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:18:04.245393Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-10T12:18:04.245442Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-10T12:18:04.245494Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 4 2026-01-10T12:18:04.245543Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 4 2026-01-10T12:18:04.245673Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 2/3, is published: true 2026-01-10T12:18:04.250909Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:18:04.251144Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:18:04.254781Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:18:04.254978Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-10T12:18:04.268780Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 
72075186233409584 TxId: 190 2026-01-10T12:18:04.268855Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 190, tablet: 72075186233409584, partId: 2 2026-01-10T12:18:04.268990Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 190:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 FAKE_COORDINATOR: Erasing txId 190 2026-01-10T12:18:04.271954Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-10T12:18:04.272258Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-10T12:18:04.272319Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 190:2 ProgressState 2026-01-10T12:18:04.272435Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-10T12:18:04.272476Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-10T12:18:04.272526Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-10T12:18:04.272560Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-10T12:18:04.272605Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 3/3, is published: true 2026-01-10T12:18:04.272681Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [31:309:2299] message: TxId: 190 2026-01-10T12:18:04.272737Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-10T12:18:04.272793Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:0 2026-01-10T12:18:04.272831Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:0 2026-01-10T12:18:04.272933Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-10T12:18:04.272976Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:1 2026-01-10T12:18:04.273005Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:1 2026-01-10T12:18:04.273047Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-10T12:18:04.273076Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:2 2026-01-10T12:18:04.273104Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:2 2026-01-10T12:18:04.273198Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 140] was 3 2026-01-10T12:18:04.276338Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.276400Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [31:5649:7155] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-10T12:18:04.278472Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-10T12:18:04.278535Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-10T12:18:04.278645Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-10T12:18:04.278678Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-10T12:18:04.278751Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-10T12:18:04.278786Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-10T12:18:04.278862Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-10T12:18:04.278893Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-10T12:18:04.278963Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-10T12:18:04.278998Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-10T12:18:04.281286Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-10T12:18:04.281875Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-10T12:18:04.281977Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.282021Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [31:5690:7196] 2026-01-10T12:18:04.282483Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-10T12:18:04.282646Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.282689Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [31:5690:7196] 2026-01-10T12:18:04.282925Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-10T12:18:04.283082Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.283120Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [31:5690:7196] 2026-01-10T12:18:04.283277Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-10T12:18:04.283457Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.283497Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [31:5690:7196] 2026-01-10T12:18:04.283689Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-10T12:18:04.283725Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [31:5690:7196] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2733, MsgBus: 10798 2026-01-10T12:17:48.194422Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706053175224229:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:48.194670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:48.257310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:17:48.518497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:48.518587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:48.522221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:48.580657Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706053175224200:2081] 1768047468189214 != 1768047468189217 2026-01-10T12:17:48.580767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:48.596504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:48.649143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:48.649164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:48.649178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-10T12:17:48.649273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:48.834469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:49.106922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:49.118388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:17:49.179073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.204161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:49.334061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.495707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:49.559517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:51.650760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706066060127957:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.650894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.651319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706066060127967:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:51.651368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.058305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.090303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.121125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.154762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.189828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.246647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.289377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.341601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:52.426397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070355096140:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.426534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.426799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070355096143:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.426861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.427076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706070355096147:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:52.430217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:52.439709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706070355096149:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:17:52.515607Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706070355096200:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:53.194156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706053175224229:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:53.194229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... :17:57.600016Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:57.601603Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706094886456276:2081] 1768047477510578 != 1768047477510581 2026-01-10T12:17:57.648167Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:57.648272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:57.652410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:57.692027Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:57.692056Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:57.692065Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:57.692194Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:57.797479Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:58.196319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:58.210502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.307347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.472786Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:58.528575Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:58.561146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.823178Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706107771360046:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.823280Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.823828Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706107771360055:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.823924Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:00.901619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.933087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.966413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.001910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.059526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.095486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.129383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.184153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:01.267273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112066328218:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.267401Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.267459Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112066328223:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.267854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706112066328225:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.267957Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:01.272394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:01.285358Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706112066328226:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:01.370600Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706112066328278:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:02.513059Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706094886456344:2106];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:02.513129Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:03.030889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.069496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.176358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> GroupWriteTest::SimpleRdma [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> KqpPragma::ResetPerQuery >> KqpYql::UuidPrimaryKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::SimpleRdma [GOOD] Test command err: RandomSeed# 3500015321144945169 2026-01-10T12:17:51.698454Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:17:51.722720Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:17:51.722795Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:17:51.725372Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:17:51.740129Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 
Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:17:51.742786Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:18:08.609984Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-10T12:18:08.610076Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:18:08.669779Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpScripting::StreamExecuteYqlScriptData >> KqpYql::ColumnNameConflict >> YdbProxy::OAuthToken [GOOD] >> YdbProxy::StaticCreds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 21614, MsgBus: 27567 2026-01-10T12:18:05.508480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706128613911761:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.510755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.582091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.884288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.886137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.886269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.893369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.062593Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.070660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.268904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.268934Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.268942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.269043Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.519608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.901015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.907193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:09.145741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145793781761:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.145850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.146217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145793781771:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.146261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.394399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.578373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145793781872:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.578446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.578695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145793781877:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.578728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.578750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145793781878:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.590042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.602843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706145793781881:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:18:09.702148Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706145793781932:2601] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] >> KqpYql::TableNameConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 25601, MsgBus: 17993 2026-01-10T12:18:06.154396Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706131853675224:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:06.154483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:06.208440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:06.527607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:06.527759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:06.611317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.613457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.617593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.619412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706131853675038:2081] 1768047486142363 != 1768047486142366 2026-01-10T12:18:06.834507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.842648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.842679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.842688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.842801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:07.162823Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.453370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:07.465580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:09.646936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577814:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.647017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.647404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577824:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.647439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.903600Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706144738577837:2506] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-10T12:18:09.936536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577845:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.936624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.936933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577848:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.936995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.952858Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706144738577855:2516] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-10T12:18:09.967556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577863:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.967631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.968015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144738577865:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.968048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.998094Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706144738577872:2525] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2026-01-10T12:18:10.020372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149033545176:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.020477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.020989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149033545179:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.021069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.035042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.149317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149033545266:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.149396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.150169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149033545268:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.150224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::FromBytes >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::JsonCast >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin >> YdbProxy::DescribeTopic [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::EvaluateExprPgNull >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> YdbProxy::DropTopic [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable >> KqpYql::InsertCV-useSink [GOOD] >> KqpYql::InsertCVList+useSink >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2026-01-10T12:18:03.842375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706117527200388:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:03.843075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.107822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:04.107946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:04.168243Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:04.168847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:04.172299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706117527200357:2081] 1768047483840545 != 1768047483840548 2026-01-10T12:18:04.178321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:04.331609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:04.452899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:04.452927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:04.452934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-10T12:18:04.453022Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:04.752401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:04.760764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:04.850726Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.880952Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706136095188546:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:07.881055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:07.947663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:08.133241Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:08.137989Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706136095188519:2081] 1768047487879392 != 1768047487879395 2026-01-10T12:18:08.146666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:08.146748Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:08.150786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:08.207967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:08.464862Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:08.464893Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:08.464899Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:08.465010Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:08.797412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:08.810705Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:08.872054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:18:08.902260Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2026-01-10T12:18:03.994410Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706118033156172:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:03.994833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.219992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:04.244441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:04.244520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:04.264959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:04.319175Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:04.320430Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706118033156067:2081] 1768047483989659 != 1768047483989662 2026-01-10T12:18:04.381247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:04.556903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:04.556934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:04.556941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:04.557014Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:04.873686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:04.881882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:04.996615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:18:05.004482Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:05.083908Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706126623091543:2523] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-10T12:18:08.147062Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706141417032642:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:08.147243Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:08.224506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:08.340217Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706141417032533:2081] 1768047488143785 != 1768047488143788 2026-01-10T12:18:08.347037Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:08.359623Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:08.359730Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:08.363964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:08.482367Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:08.624712Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:08.624739Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:08.624750Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:08.624819Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:08.936745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:08.948477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:09.015445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:18:09.159001Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:09.173029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::EvaluateIf >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2026-01-10T12:18:04.384664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706123645780380:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.384872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.652206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:04.652320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:04.716009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:04.716333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:04.741538Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706123645780353:2081] 1768047484383402 != 1768047484383405 2026-01-10T12:18:04.755615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:04.888202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.014058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.014080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.014087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.014175Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.318559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:05.393688Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.817240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.036479Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:18:08.049527Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706140825650558:2596] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-10T12:18:08.749814Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706139913675492:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:08.751862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:08.772162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:08.856212Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706139913675381:2081] 1768047488737051 != 1768047488737054 2026-01-10T12:18:08.864051Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:08.895710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:08.895807Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:08.915215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:09.020178Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:09.151281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:09.151307Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:09.151316Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:09.151388Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:09.480643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:09.492662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:09.754115Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> KqpScripting::StreamExecuteYqlScriptMixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2026-01-10T12:18:04.630967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706123855117304:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.632126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.928106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:04.934619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:04.934750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.018371Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706123855117201:2081] 1768047484621444 != 1768047484621447 2026-01-10T12:18:05.029298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.061256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.217293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.554412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.554442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.554451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.554549Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.642577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:05.956302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:05.962840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:08.947132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:08.949910Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706139338799859:2258];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:08.950107Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:09.051612Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:09.054119Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706139338799625:2081] 1768047488899504 != 1768047488899507 2026-01-10T12:18:09.072028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:09.105543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:09.105648Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:09.111487Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:09.354288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:09.399471Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:09.399496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:09.399502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:09.399569Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:09.765315Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:09.773103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:09.904464Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:09.941264Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:18:10.026188Z node 2 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-10T12:18:10.029922Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:18:10.029959Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:18:10.056141Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706147928735207:2591] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> KqpYql::UpdateBadType >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2026-01-10T12:18:04.541127Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706122814720192:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.544577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.781714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:04.788858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:04.789024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:04.793845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:04.853229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.077134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.225684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.225707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.225714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.225863Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:18:05.530456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:05.544411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:05.548519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:08.237405Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706139994590134:2502] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2026-01-10T12:18:08.262371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.477704Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706139994590219:2561] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:09.412191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:09.415505Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:18:09.500327Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706146577782636:2081] 1768047489338942 != 1768047489338945 2026-01-10T12:18:09.539822Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:09.546902Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:09.546995Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:09.553393Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:09.690134Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:09.816734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:09.816760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:09.816768Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-10T12:18:09.816855Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:10.127617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:10.136219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:10.383329Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:12.748508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:12.890706Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7593706159462685592:2333] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:18:12.924984Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706159462685643:2641] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> KqpScripting::ScanQuery >> KqpYql::EvaluateExpr1 >> KqpYql::UuidPrimaryKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 4949, MsgBus: 5932 2026-01-10T12:18:09.252570Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706146033943034:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.253174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:09.280949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:09.636261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:09.646765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:09.646922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:18:09.734252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:09.746156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:09.772209Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706146033942997:2081] 1768047489221822 != 1768047489221825 2026-01-10T12:18:09.878978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:09.974942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:09.974959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:09.974963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:09.975024Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:10.259798Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:10.482791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:10.499208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:12.692701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706158918845773:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:12.692848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:12.693254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706158918845783:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:12.693294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:12.988099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
2026-01-10T12:18:13.122727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706163213813172:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:13.122820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:13.123123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706163213813177:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:13.123134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706163213813178:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:13.123185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:13.127242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-01-10T12:18:13.145319Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706163213813181:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:18:13.269254Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706163213813232:2597] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:14.244344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706146033943034:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:14.244406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::QueryStats >> KqpPragma::OrderedColumns >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> GroupWriteTest::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogram 2026-01-10 12:18:12,305 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:18:12,572 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 127043 58.6M 58.3M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/003b31/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args 127506 2.0G 1.9G 1.5G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/003b31/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:08:18.122112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:18.244841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:18.253555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:08:18.254065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:08:18.254166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:08:18.741817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:18.862045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:18.862215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:18.900615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:18.991178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:08:19.953814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:08:19.954596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:08:19.954665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:08:19.954711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:08:19.955594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:08:20.056699Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:08:20.763555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:08:24.748309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:08:24.757787Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:08:24.762422Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:08:24.837951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:08:24.838083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:24.883445Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:08:24.890735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.100219Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.100987Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.104103Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.105192Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.105349Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.105667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.105887Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.106068Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.106279Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:08:25.357055Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:08:25.393798Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:08:25.393925Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:08:25.418063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:08:25.700672Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:08:25.767371Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:08:25.767482Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:08:25.817537Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:08:25.819498Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:08:25.819759Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:08:25.819828Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:08:25.819887Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:08:25.819941Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:08:25.820014Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:08:25.821932Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:08:25.822913Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:08:26.013475Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:26.013587Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:08:26.053882Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:08:26.054291Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:08:26.441580Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:08:26.442061Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:08:26.448311Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:08:26.452216Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:08:26.452356Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:08:26.462123Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:08:26.466028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:08:26.512991Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:08:26.513168Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:08:26.549722Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:08:26.871183Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:08:27.148889Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:08:27.280859Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:08:27.280956Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:08:28.035229Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 94046644480 2026-01-10T12:09:05.234713Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:09:05.234811Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:09:05.235144Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:09:05.260064Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:09:05.506537Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:09:05.511034Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:09:05.515123Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1` 2026-01-10T12:09:05.515351Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], Start read next stream part 2026-01-10T12:09:05.527564Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4526:4039], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:09:05.527691Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4537:4044], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:09:05.528296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:09:05.529405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4542:4049], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:09:05.529552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:09:05.536115Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4558:4053], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1]
2026-01-10T12:09:05.539731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-01-10T12:09:05.611872Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4540:4047], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:09:05.771942Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4629:4097], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:09:05.836534Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4628:4096] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:09:06.263132Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4650:4110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:09:06.263430Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:09:06.263528Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4652:4112] 2026-01-10T12:09:06.263600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4652:4112] 2026-01-10T12:09:06.263984Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4653:4113] 2026-01-10T12:09:06.264122Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4652:4112], server id = [2:4653:4113], tablet id = 72075186224037894, status = OK 2026-01-10T12:09:06.264226Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4653:4113], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:09:06.264303Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:09:06.264419Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:09:06.264515Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4650:4110], StatRequests.size() = 1 2026-01-10T12:09:06.264794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:14:03.969129Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekwys7bdcejw1yhm4c6nzyr", SessionId: ydb://session/3?node_id=2&id=MzdhY2YxMWMtMmJjNGQ3MTMtNzUzNTA4OGYtNDhjMWE4MGY=, Slow query, duration: 298.446345s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-10T12:14:03.971575Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:14:03.972070Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], Start read next stream part 2026-01-10T12:14:03.972937Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-10T12:14:03.973096Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4784:4233], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:14:03.977998Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4784:4233], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-10T12:14:03.978157Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4784:4233], Start read next stream part 2026-01-10T12:14:03.978520Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:14:03.978622Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4503:2470], ActorId: [2:4513:4033], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWZjMmE4YTctYTExZGM0OGEtNDU0ZGNjN2QtYmM4ZjQ2ZWU=, TxId: 2026-01-10T12:14:04.325548Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4810:4255]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:14:04.325846Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:14:04.325902Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4810:4255], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003b31/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003b31/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpYql::TableConcat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 8080, MsgBus: 6743 2026-01-10T12:18:09.608831Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706144447520817:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.608874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:09.694727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:10.028229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:10.034561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:10.034684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:10.110446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:10.129039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:10.130374Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706144447520798:2081] 1768047489600957 != 1768047489600960 2026-01-10T12:18:10.235616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:10.235638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:10.235650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:10.235725Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:10.257019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:10.632209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:10.687421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:12.914409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706157332423575:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:12.914526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:12.914915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706157332423585:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:12.914959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.214673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.402319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706161627390974:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.402457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.403436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706161627390979:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.403488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706161627390980:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.403533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.408450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:13.437903Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706161627390983:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:18:13.551696Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706161627391034:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:14.143443Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706165922358427:2364], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2026-01-10T12:18:14.145235Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGFiZWMxZmMtM2RhNTQwZTctOGI1YjY5YjItZTM1ZWUxNDk=, ActorId: [1:7593706157332423557:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxfgzk06qp4jsm3q4cz9mr, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Invalid value \"invalid-uuid\" for type Uuid" end_position { row: 3 column: 25 } severity: 1 } tx_id# trace_id# 2026-01-10T12:18:14.609259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706144447520817:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:14.609335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 9075056280637936197 2026-01-10T12:17:55.110338Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:17:55.136023Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:17:55.136114Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:17:55.138723Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:17:55.157956Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:17:55.160918Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:18:17.329860Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-10T12:18:17.329991Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:18:17.403585Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> YdbProxy::AlterTopic [GOOD] >> KqpYql::ColumnNameConflict 
[GOOD] >> KqpYql::ColumnTypeMismatch >> KqpYql::TableNameConflict [GOOD] >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpYql::Discard+DiscardSelectIsOn >> KqpYql::Closure [GOOD] >> KqpYql::EvaluateExprPgNull [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> KqpYql::ScriptUdf [GOOD] >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpYql::JsonCast [GOOD] >> KqpYql::FromBytes [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2026-01-10T12:18:04.883105Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706124834876795:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.883553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.266006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.271334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.276413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.330329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.404690Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.408220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706124834876745:2081] 1768047484879554 != 1768047484879557 2026-01-10T12:18:05.555536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.796710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.796733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.796738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.796810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.892684Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.091515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:18:06.116559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:08.601363Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706142014746820:2502] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-10T12:18:08.622354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.534352Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:18:09.548206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:09.660017Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:09.663973Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706145747348549:2081] 1768047489460908 != 1768047489460911 2026-01-10T12:18:09.671025Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:09.671116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:09.697843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:09.844156Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:09.920646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:09.920669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:09.920679Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:09.920753Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:10.230166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:10.553814Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:12.780823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:12.877505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.148732Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706164805081967:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:14.190450Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:14.219032Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:14.320004Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:14.320260Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:14.321293Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:14.322923Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706164805081932:2081] 1768047494074739 != 1768047494074742 2026-01-10T12:18:14.362745Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:14.508914Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:14.776847Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:14.776875Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:14.776884Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:14.776976Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:15.069161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:15.165366Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:15.267165Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:18:15.300752Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706169100050216:2585] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2026-01-10T12:16:40.966938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:16:40.992815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:16:40.993058Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:16:40.999309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:16:40.999513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:16:40.999743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:16:40.999831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:16:40.999892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:16:40.999952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:16:41.000011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:16:41.000116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:16:41.000198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:16:41.000279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:16:41.000349Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:16:41.000427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:16:41.000516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:16:41.022225Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:16:41.022610Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:16:41.022684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:16:41.022894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:41.023081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:16:41.023173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:16:41.023216Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:16:41.023336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:16:41.023404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:16:41.023450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:16:41.023480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:16:41.023664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:16:41.023735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:16:41.023773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:16:41.023819Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:16:41.023914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:16:41.023973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:16:41.024012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:16:41.024041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:16:41.024119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:16:41.024174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:16:41.024200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:16:41.024232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:16:41.024260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:16:41.024279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:16:41.024467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:16:41.024614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:16:41.024640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:16:41.024741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:16:41.024773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:16:41.024801Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:16:41.024861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:16:41.024920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:16:41.024949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:16:41.024985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:16:41.025018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:16:41.025047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:16:41.025132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:16:41.025171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
mn_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.110598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.110892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.111196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.111522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.111812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.112109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.112409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.112626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 
1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.112816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.113006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.113210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.113389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.113624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-10T12:18:15.113934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5208,5208;s_splitted=5312,5296;r_splitted=854,854; 2026-01-10T12:18:15.117885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 
1};entity_id=5;size=320144;limit=10240;r_count=80000;fline=column_info.h:139;sizes=10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005;s_splitted=10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10208;r_splitted=2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2516; 2026-01-10T12:18:15.185596Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6817016;count=1255;actions=__DEFAULT,;waiting=1;; 2026-01-10T12:18:16.436695Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6265200;event=data_write_finished;writing_id=6fd9aec2-ee1e11f0-8ee4229e-c5cc9fc8; 2026-01-10T12:18:16.438955Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=86;data_size=65;sum=4128;count=95; 2026-01-10T12:18:16.439045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=182;data_size=177;sum=8736;count=96;size_of_meta=112; 2026-01-10T12:18:16.439113Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=262;data_size=257;sum=12576;count=48;size_of_portion=192; 2026-01-10T12:18:16.457734Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-10T12:18:16.473992Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-10T12:18:17.610018Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-10T12:18:17.611781Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=110;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:18:17.738332Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768047406265 at tablet 9437184, mediator 0 2026-01-10T12:18:17.738447Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] execute at tablet 9437184 2026-01-10T12:18:17.739649Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=128;fline=abstract.h:88;progress_tx_id=128;lock_id=1;broken=0; 2026-01-10T12:18:17.763284Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] complete at tablet 9437184 2026-01-10T12:18:17.763418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=128;lock_id=1;broken=0; 2026-01-10T12:18:17.763647Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-10T12:18:17.763690Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=26; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpScripting::LimitOnShard >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpYql::InsertCVList+useSink [GOOD] >> KqpScripting::ScriptValidate >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> KqpYql::CreateUseTable [GOOD] >> KqpYql::EvaluateIf [GOOD] |91.0%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 3824, MsgBus: 17295 2026-01-10T12:18:04.914433Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706123635924454:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.914516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.954458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.309874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.313885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.314087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.343979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.431090Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706123635924303:2081] 1768047484874062 != 1768047484874065 2026-01-10T12:18:05.443958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.497322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.497353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.497364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.497481Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.519051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.922807Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:05.988497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:05.996209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.051182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.267397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.484754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.586134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.609683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140815795358:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.609806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.610675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140815795368:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.610754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.965960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.006183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.045039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.093857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.121931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.156179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.200977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.240756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.325088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145110763540:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.325208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.327215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145110763545:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.327265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145110763546:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.327314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.331112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.347006Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706145110763549:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.441608Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706145110763600:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:09.915471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706123635924454:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.915538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 4, MsgBus: 9582 2026-01-10T12:18:12.105545Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706159238766144:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.105625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.220960Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.225705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.225796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.229549Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.231922Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706159238766115:2081] 1768047492104502 != 1768047492104505 2026-01-10T12:18:12.249505Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.328089Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.328107Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.328115Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.328206Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:12.516200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.794951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:12.802634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:12.827163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:12.985155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.128506Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.146714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.219644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.838666Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706172123669864:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.838770Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.839191Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706172123669874:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.839242Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.906424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.950665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.005542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.049045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.092667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.150700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.201249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.260646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.340374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176418638042:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.340456Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.340526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176418638047:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.340674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176418638049:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.340707Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.343890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.355191Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706176418638051:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:16.444520Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706176418638102:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.106067Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706159238766144:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.106144Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprPgNull [GOOD] Test command err: Trying to start YDB, gRPC: 31895, MsgBus: 32150 2026-01-10T12:18:05.259130Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706126231150622:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.279072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.306932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.598687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.598818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.604303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.608537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.761169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706126231150578:2081] 1768047485256075 != 1768047485256078 2026-01-10T12:18:05.774287Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.901652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.901675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.901707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.901838Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.904799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.278123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.490967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.545964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.713080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.900866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.016354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.956608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706139116054348:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.956727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.957368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706139116054358:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.957415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.287429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.336951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.385757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.434820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.494425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.528852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.590585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.632574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.728515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143411022525:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.728582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.728914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143411022530:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.728960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143411022531:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.728995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.732438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.754229Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706143411022534:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.811275Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706143411022585:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.258318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706126231150622:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.258378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29295, MsgBus: 22206 2026-01-10T12:18:12.728581Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706156357602483:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.729350Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.739305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:12.847480Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.849140Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706156357602274:2081] 1768047492720360 != 1768047492720363 2026-01-10T12:18:12.860279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.860354Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.864277Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.885901Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:13.032624Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.032646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.032653Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.032725Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.158093Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.509314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.517869Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.537691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.647844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.755725Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.811167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.938307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.342024Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173537473324:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.342117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.342427Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173537473333:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.342495Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.382323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.422325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.458726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.504837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.546715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.606619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.662951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.742010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.866022Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173537474205:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.866113Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.866137Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173537474210:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.866443Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173537474212:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.866507Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.870352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.885065Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706173537474213:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:16.981786Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706173537474265:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.728268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706156357602483:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.728366Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 8459, MsgBus: 11032 2026-01-10T12:18:05.279359Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706129969603809:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.279756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.596157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.638574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.643864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.729322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.789219Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.865289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.957490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.957517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.957524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.957628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.288547Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.559315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.565851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.651344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.805181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.999935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.091152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.989066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706142854507524:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.989190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.994116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706142854507534:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.994202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.314146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.349684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.385701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.418287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.491631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.573492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.609707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.661933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.735988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147149475706:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.736151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.736746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147149475712:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.736783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147149475711:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.736787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.740999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.754136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706147149475715:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.861753Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706147149475766:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.279456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706129969603809:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.280963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20013, MsgBus: 2311 2026-01-10T12:18:12.324448Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706158270207954:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.324497Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.333801Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:12.452239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.456713Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.458674Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706158270207926:2081] 1768047492323460 != 1768047492323463 2026-01-10T12:18:12.467761Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.467859Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.494020Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.673211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.684778Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.684799Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.684808Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.684896Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.132989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.141090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.158344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.218840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.333889Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.371822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.455035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.005446Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706171155111682:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.005525Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.005803Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706175450078988:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.005844Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.114148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.157620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.193226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.235907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.270820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.307780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.358659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.442005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.580403Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706175450079864:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.580518Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.580637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706175450079869:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.580865Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706175450079871:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.580909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.584844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.603532Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706175450079873:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:16.705783Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706175450079924:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.324598Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706158270207954:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.324665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 32315, MsgBus: 26290 2026-01-10T12:18:05.378587Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706126038081843:2218];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.378753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.407249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.768194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.823142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.823254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.825588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.001614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.043964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.043983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.043989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.044094Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.063931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.380382Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.597631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.609670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.708139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.912274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.130530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.207816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.126511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143217952703:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.126661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.127075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143217952713:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.127123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.506557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.544446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.593085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.634458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.720530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.807322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.859828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.922442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.013914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147512920884:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.014026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.014424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147512920889:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.014481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147512920890:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.014529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.018795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.033954Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706147512920893:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.135731Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706147512920946:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.377003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706126038081843:2218];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.377088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 8674, MsgBus: 13611 2026-01-10T12:18:12.697248Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706157325808264:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.697317Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.753815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.867925Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.868003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.873675Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.874641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.961672Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.096755Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.096792Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.096802Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.096887Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.570642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.591441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.676154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.807880Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.872611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.949776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.366807Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174505679171:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.366888Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.367350Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174505679181:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.367397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.498187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.541484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.584221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.626895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.667581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.723995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.777100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.829809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.919811Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174505680055:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.919889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.920198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174505680059:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.920242Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174505680062:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.920277Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.923580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.938135Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706174505680064:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:17.027097Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706178800647411:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.697037Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706157325808264:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.697109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 15469, MsgBus: 12800 2026-01-10T12:17:59.654286Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706104043730057:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:59.654357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:59.887614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:59.887752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:59.932808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:59.941455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:59.942117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:59.943957Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706104043730026:2081] 1768047479653275 != 1768047479653278 2026-01-10T12:18:00.069592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:00.069614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:00.069626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:00.069750Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:00.237480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:00.531653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:00.585118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.663347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:00.744206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.884110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:00.957102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:02.920224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706116928633794:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:02.920390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:02.920665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706116928633804:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:02.920742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.220440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.262363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.295143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.325147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.358810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.397457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.461899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.505453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:03.586123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706121223601971:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.586195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.586608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706121223601977:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.586650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706121223601976:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.586661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:03.590878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:03.602321Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706121223601980:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:03.670088Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706121223602031:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:04.654796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706104043730057:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.654876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:05.268208Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=ZmU3MTNjMjItZDRkOTA0NTUtMTIwMTJjODItNDk0N2FlZGM=, ActorId: [1:7593706116928633776:2397], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-10T12:18:05.268279Z node 1 :KQP_SESSION INFO: {KQPSA@ ... nection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":23}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":16}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":18}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":11,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":11}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":13}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":7,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":7,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":11,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":21,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":22,\"Plans\":[{\"PlanNodeId\":23,\"Plans\":[{\"PlanNodeId\":25,\"Plans\":[{\"PlanNodeId\":26,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":27,\"Plans\":[{\"PlanNodeId\":28,\"Plans\":[{\"PlanNodeId\":30,\"Plans\":[{\"PlanNodeId\":31,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"609de900-57e05e89-1c07d226-6a36f4e0","version":"1.0"} 2026-01-10T12:18:11.566739Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706142698439134:2753], duration: 3.175910s 2026-01-10T12:18:11.566784Z node 1 
:KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706142698439134:2753], owner: [1:7593706116928633763:2385], status: SUCCESS, issues: , uid: 609de900-57e05e89-1c07d226-6a36f4e0 2026-01-10T12:18:11.566920Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706129813536958:2569], status: SUCCESS, compileActor: [1:7593706142698439134:2753] 2026-01-10T12:18:11.566994Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706129813536958:2569], queryUid: 609de900-57e05e89-1c07d226-6a36f4e0, status:SUCCESS still compiling... 0 received non-success status for session 1 received non-success status for session 31 still active sessions ... 0 received non-success status for session 11 received non-success status for session 16 received non-success status for session 13 received non-success status for session 45 received non-success status for session 12 received non-success status for session 17 received non-success status for session 8 received non-success status for session 9 received non-success status for session 3 received non-success status for session 15 received non-success status for session 46 received non-success status for session 23 received non-success status for session 22 received non-success status for session 33 received non-success status for session received non-success status for session 18 35 received non-success status for session received non-success status for session 47 34 received non-success status for session 26 received non-success status for session 20 received non-success status for session 19 received non-success status for session 42 received non-success status for session 28 received non-success status for session 25 received non-success status for session 24 received non-success status for session 5 received non-success status for session 49 received non-success status for session 30 received non-success status for session received non-success status for session 29 10 received non-success status for session 32 received non-success status for session 7 received non-success status for session 4 received non-success status for session received non-success status for session 39 0 received non-success status for session 27 received non-success status for session 44 received non-success status for session 43 received non-success status for session 36 received non-success status for session 6 received non-success status for session 37 received non-success status for session 2 received non-success status for session 40 received non-success status for session 41 received non-success status for session 38 received non-success status for session 21 received non-success status for session 48 received non-success status for session 14 2026-01-10T12:18:14.854500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:18:14.854540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 16971, MsgBus: 32589 2026-01-10T12:18:04.904182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706122895404030:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.904285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.179615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.179739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.250495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.279519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.289314Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.489070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.547286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.547310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.547317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.547424Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.905037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.076669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.098755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.199587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.379013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.549470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.655291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.793016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140075274955:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.793147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.793680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140075274965:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.793753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.173850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.203573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.239881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.292957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.327674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.402854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.446100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.500025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.600395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144370243140:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.600476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.600808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144370243145:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.600833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706144370243146:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.600880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.605077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.626544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706144370243149:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.697702Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706144370243200:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:09.880349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706122895404030:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.880408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27624, MsgBus: 15192 2026-01-10T12:18:12.608124Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.608306Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:18:12.744182Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706159579591782:2081] 1768047492534304 != 1768047492534307 2026-01-10T12:18:12.782115Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.799923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.800011Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.806852Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.832357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.000827Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.000848Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.000856Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.000939Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.567800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.688769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.700347Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.718385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.787317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.926533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.987950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.461023Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176759462832:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.461144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.461758Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176759462842:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.461814Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.547336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.621173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.657348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.691590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.726223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.791177Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.830359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.887076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.984135Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176759463714:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.984219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.984647Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176759463719:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.984714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176759463720:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.984758Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.988872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.004096Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706176759463723:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:17.064288Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706181054431070:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 31086, MsgBus: 24339 2026-01-10T12:18:04.849976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706124104507826:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.850792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.878830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.170461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.170574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.223956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.310377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.313131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.313295Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706124104507748:2081] 1768047484766331 != 1768047484766334 2026-01-10T12:18:05.522121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.522157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.522165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.522276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.545039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.879437Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.076816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.093721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.165492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.324715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.514420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.607062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.581176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706141284378811:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.581321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.588247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706141284378821:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.588392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.978537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.017552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.057889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.118422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.164692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.206748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.247334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.290883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.371587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145579346987:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.371670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.371754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145579346992:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.372101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145579346994:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.372132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.375883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.391163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706145579346995:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.472057Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706145579347047:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:09.843600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706124104507826:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.843670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... NDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 Trying to start YDB, gRPC: 24782, MsgBus: 4655 2026-01-10T12:18:12.381314Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706156520781086:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.383127Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.404180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.524236Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.547783Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.547866Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.563656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.662284Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.724729Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.724760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.724769Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.724868Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.252467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.258826Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.273880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.340734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.449039Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.523183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.600148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.465808Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173700652085:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.465910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.466496Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173700652095:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.466561Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.542095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.582782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.629265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.669330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.709701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.758258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.805204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.883459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.969214Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173700652965:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.969318Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.969640Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173700652970:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.969689Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706173700652971:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.969975Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.973630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.991272Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706173700652974:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:17.087221Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706177995620321:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.379249Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706156520781086:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.379305Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 24771, MsgBus: 20297 2026-01-10T12:18:05.298617Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706128587065682:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.299309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.759756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.765447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.782204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.847234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.873576Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.875027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706128587065571:2081] 1768047485278797 != 1768047485278800 2026-01-10T12:18:06.062352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.062379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.062386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.062483Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.114897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.284220Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-10T12:18:06.651592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.657705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.753624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.971200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.152490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.245036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.292706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145766936626:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.292827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.293208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145766936636:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.293274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.637910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.677801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.722590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.766127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.814192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.861427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.917805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.973368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.060767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706150061904807:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706150061904812:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.061058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706150061904813:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.061096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.065627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.079764Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706150061904816:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.154105Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706150061904867:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.282737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706128587065682:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.282813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:11.913092Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706154356872481:2541], status: PRECONDITION_FAI ... 01-10T12:18:13.034555Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:13.052044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:13.181563Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:13.183398Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:13.186511Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706161435384883:2081] 1768047493012556 != 1768047493012559 2026-01-10T12:18:13.222928Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:13.223011Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:13.228806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:13.349726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.349746Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.349754Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.349830Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.365617Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.784768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.794185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.806705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.856443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.005099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.046787Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:14.072340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.664615Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174320288636:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.664726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.672273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174320288646:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.672405Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.731304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.766749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.801441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.841466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.892556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.939294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.980210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.031750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.116424Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178615256819:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.116513Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.116529Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178615256824:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.116669Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178615256826:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.116711Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.120757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.135096Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706178615256827:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:17.228738Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706178615256879:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:18.035011Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706161435384932:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:18.035090Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:19.018133Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499043, txId: 281474976715673] shutting down |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 8112, MsgBus: 30915 2026-01-10T12:18:05.376056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706128431085100:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.376152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.687713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.704595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.704690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.766729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.906962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.928162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.064616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.064635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.064641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.064707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.398703Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.546903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.663490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.844804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.018877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.104178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.989791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706141315988818:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.989894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.991252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706141315988828:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.991323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.331255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.373501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.414179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.454330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.500412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.555555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.606854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.660807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.750811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145610956991:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.750883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.751165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145610956997:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.751210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145610956996:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.751333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.754796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.769269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706145610957000:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:09.837790Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706145610957051:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.374900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706128431085100:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.374980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28984, MsgBus: 61097 2026-01-10T12:18:12.624370Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706159039631489:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.624985Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.660767Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.788173Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706159039631383:2081] 1768047492620988 != 1768047492620991 2026-01-10T12:18:12.798301Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.798401Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.801058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.816804Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.902123Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.052633Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.052658Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.052667Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.052741Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.603911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:18:13.641152Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.649802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.665924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.732037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.905369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.984297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.430210Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176219502431:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.430306Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.430857Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176219502441:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.430920Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.478891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.524921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.567801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.645678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.677034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.715376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.763650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.837766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.945724Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176219503312:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.945851Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.946217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176219503317:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.946264Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706176219503318:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.946341Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.950626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.962212Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706176219503321:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:17.020693Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706180514470668:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.623701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706159039631489:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.623775Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22215, MsgBus: 7315 2026-01-10T12:18:05.467932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706128290893710:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.468007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.923510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.923647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.942408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.982648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.048518Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.110784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.110817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.110833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.110917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.255962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.476418Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.702839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.713478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.815417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.059113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.251918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.348620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.363587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145470764628:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.363690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.364048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145470764638:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.364114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.703720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.732902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.811908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.840698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.869223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.930980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.022591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.076543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.170425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149765732809:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.170523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.170849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149765732814:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.170891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149765732815:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.170915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.174965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.191153Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706149765732818:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.292843Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706149765732869:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.461499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706128290893710:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.461570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:12.244808Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706158355667772:2537], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxfeth057n1y04m2qgsp0g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NzQ1NTg2MWItMmNkODA1Y2UtYjdjNGY4MC03YjBiM ... eration type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.117117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.193717Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:14.279183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.391185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.220461Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181662279885:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.220560Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.220866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181662279894:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.220924Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.298942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.333900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.369978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.406034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.450484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.491882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.539448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.591835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.684680Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181662280768:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.684761Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.684982Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181662280773:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.685025Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181662280774:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.685127Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.688390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.701335Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706181662280777:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:17.777199Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706181662280828:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:18.184187Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706164482408887:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:18.184322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:19.523809Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-10T12:18:19.524099Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:18:19.524241Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:18:19.524389Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7593706190252215759:2538], Table: `/Root/Test` ([72057594046644480:45:1]), SessionActorId: [2:7593706190252215744:2538]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7593706190252215759:2538].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:18:19.524515Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7593706190252215753:2538], SessionActorId: [2:7593706190252215744:2538], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7593706190252215744:2538]. 2026-01-10T12:18:19.524818Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=NzBiNjY5NjItNDlkMWE4M2YtOWVkMWYwNWQtZTNjYjBlZmU=, ActorId: [2:7593706190252215744:2538], ActorState: ExecuteState, LegacyTraceId: 01kekxfp459e264m5sbn80endd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7593706190252215754:2538] from: [2:7593706190252215753:2538] trace_id# 2026-01-10T12:18:19.524943Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:7593706190252215754:2538] TxId: 281474976715673. Ctx: { TraceId: 01kekxfp459e264m5sbn80endd, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzBiNjY5NjItNDlkMWE4M2YtOWVkMWYwNWQtZTNjYjBlZmU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:18:19.525239Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NzBiNjY5NjItNDlkMWE4M2YtOWVkMWYwNWQtZTNjYjBlZmU=, ActorId: [2:7593706190252215744:2538], ActorState: ExecuteState, LegacyTraceId: 01kekxfp459e264m5sbn80endd, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 63671, MsgBus: 7569 2026-01-10T12:18:06.064819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706132416437060:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:06.096215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:06.436364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.448358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:06.448481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:06.498691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.607937Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.792200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.868482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.868507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.868527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.868622Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:07.112782Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.451794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:07.474909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:07.529167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.686541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.886104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.969501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.862558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145301340767:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.862667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.863320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706145301340777:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.863381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.240928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.273558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.307951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.340355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.370351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.425014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.498149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.553695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.650857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149596308951:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.650968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.651353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149596308956:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.651396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149596308957:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.651706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.656384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.670386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706149596308960:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.775642Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706149596309011:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:11.068311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706132416437060:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.068383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 15961, MsgBus: 16575 2026-01-10T12:18:13.503637Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:18:13.505983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:13.628490Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706162280992890:2081] 1768047493406551 != 1768047493406554 2026-01-10T12:18:13.631537Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:13.637286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:13.637366Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:13.660816Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:13.693993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.768650Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.768673Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.768681Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.768764Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:14.204625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:14.214153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:14.224101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.342505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:18:14.487909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.526403Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:14.561553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.149151Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179460863953:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.149280Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.150871Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179460863963:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.150950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.234750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.270418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.305134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.339556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.384438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.446265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.492457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.561076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.677014Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179460864835:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.677102Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.677380Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179460864840:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.677442Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179460864841:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.677537Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.681752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.697706Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706179460864844:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:17.754043Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706179460864895:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:19.407099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.614534Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499652, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 12281, MsgBus: 14323 2026-01-10T12:18:04.924373Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706122406848117:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:04.924418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:04.964350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:05.318020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.318033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.318195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.342516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.359623Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706122406848090:2081] 1768047484922471 != 1768047484922474 2026-01-10T12:18:05.376706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.580513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.688445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.688468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.688476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-10T12:18:05.688550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:05.944258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.445368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.478564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.535258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.696268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.992959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.080183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.071498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143881686446:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.071637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.072198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143881686455:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.072319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.413371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.459018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.515607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.566579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.596993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.633917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.675307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.754942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.861743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143881687331:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.861812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.862227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143881687336:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.862273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143881687337:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.862524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.866843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.884982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706143881687340:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:09.925266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706122406848117:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.925339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:09.966738Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706143881687393:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:12.544164Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.547608Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.549689Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706157030187636:2081] 1768047492417272 != 1768047492417275 2026-01-10T12:18:12.590829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.590918Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.597412Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.760217Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.768713Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.768738Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.768751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.768837Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.289603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.296262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.307262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.404114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.426427Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.572326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.675349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.270540Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174210058690:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.270653Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.270949Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174210058700:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.270982Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.338026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.374768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.445248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.482955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.516368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.565505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.604406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.711593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.806756Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174210059582:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.806845Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.807161Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174210059588:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.807179Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174210059587:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.807269Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.811268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.822138Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706174210059591:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:16.915833Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706174210059642:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.419625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706157030187674:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.419701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:18.815272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.645545Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499680, txId: 281474976710675] shutting down |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateIf [GOOD] Test command err: Trying to start YDB, gRPC: 21993, MsgBus: 26688 2026-01-10T12:18:06.464412Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706132059740565:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:06.464574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:06.914692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:06.914793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:06.916211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.926141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:07.068229Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706132059740544:2081] 1768047486454249 != 1768047486454252 2026-01-10T12:18:07.074242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:07.092172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:18:07.316689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:07.316713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:07.316721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:07.316792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:07.488177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.850473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:07.924222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.099167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.262858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.337006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.375261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149239611592:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.375385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.379718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149239611602:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.379785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.780984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.808926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.835338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.867205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.927992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.970801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.018659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.062906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.141512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706153534579768:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.141610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.141805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706153534579773:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.141840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706153534579774:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.141874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.145661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:11.156575Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706153534579777:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:11.239715Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706153534579828:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:11.461789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706132059740565:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.461854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22692, MsgBus: 18307 2026-01-10T12:18:13.972328Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706161507863806:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:13.998747Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:14.047894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:14.149692Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:14.150857Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706161507863783:2081] 1768047493961290 != 1768047493961293 2026-01-10T12:18:14.191475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:14.191564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:14.200931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:14.208162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:14.428093Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:14.428122Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:14.428129Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:14.428221Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:14.933724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
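Note on the recurring WorkloadService warnings above: the "Resource pool default not found" messages followed by the "path exist, request accepts it" error are the workload manager bootstrapping the default pool under /Root/.metadata/workload_manager/pools/default during test startup; the NOT_FOUND warnings stop once that pool exists. For comparison, a resource pool can also be created explicitly with YQL. The statement below is a sketch from memory of the YDB workload-management syntax; the pool name and parameter names/values are assumptions, not taken from this run, and should be checked against the YDB documentation.

    -- Hypothetical example; verify parameter names against the YDB workload-management docs
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );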
2026-01-10T12:18:14.946032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:14.962421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.010029Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:15.044934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.196305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.274317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.741230Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178687734830:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.741325Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.741742Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178687734840:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.741806Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.820646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.854161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.887256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.920495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.949173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.987971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.023699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.073047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.185387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706182982703009:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.185485Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.186244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706182982703014:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.186298Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706182982703015:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.186336Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.191228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:18.201955Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706182982703018:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:18.290071Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706182982703069:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:18.968211Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706161507863806:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:18.968291Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPartitionTests::UserActCount >> KqpScripting::ScriptExplain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 12980, MsgBus: 9258 2026-01-10T12:18:06.136004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706130572674860:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:06.137369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:06.176989Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.007319s 2026-01-10T12:18:06.536167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.561150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:06.561255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:06.642130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.736888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.740200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706130572674824:2081] 1768047486100538 != 1768047486100541 2026-01-10T12:18:06.819578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:07.011937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:07.011964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:07.011978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:07.012117Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:07.133065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.535260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:07.590865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.722301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.895100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.991578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.979924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143457578594:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.980050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.984322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143457578603:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.984438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.303776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.335527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.368896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.401373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.435521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.475395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.518166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.565146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.655691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147752546770:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.655812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.655956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147752546775:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.656317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147752546777:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.656387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.661385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.674261Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706147752546779:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:10.759856Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706147752546830:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:11.105116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706130572674860:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.105197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:12.339464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at s ... 68801894:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:13.887316Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:13.930193Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:14.037126Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:14.058640Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:14.058720Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:14.061458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:14.185983Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:14.186004Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:14.186019Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:14.186116Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:14.207799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:14.603159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:14.609308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-10T12:18:14.621611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.678494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.839640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.902831Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:14.914493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.435449Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179448672905:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.435530Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.435947Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179448672915:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.435998Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.516017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.551216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.594026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.632433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.688857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.730717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.770538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.823407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.917446Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179448673783:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.917563Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.917924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179448673789:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.918021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.918022Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706179448673788:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.922557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.937071Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706179448673792:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:18.014549Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706183743641139:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:18.890190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706162268801894:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:18.890293Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:19.667624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.312944Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047500345, txId: 281474976710675] shutting down |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] |91.1%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 16533, MsgBus: 12005 2026-01-10T12:18:05.422756Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706129107674896:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.422842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.733914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.763212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.763323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.854827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.913027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.913173Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706129107674864:2081] 1768047485421770 != 1768047485421773 2026-01-10T12:18:06.006671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:18:06.036549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.036579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.036585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.036658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.448969Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.611351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.726364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.890445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.082605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.200833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.101102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706146287545921:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.101224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.101528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706146287545931:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.101563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.507908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.553410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.588988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.624976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.661741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.710694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.740577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.789864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.884304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706146287546800:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.884376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.884806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706146287546805:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.884845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706146287546806:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.884871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.889588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:09.901748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706146287546809:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:10.010690Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706150582514156:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.423069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706129107674896:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.423142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:11.604685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 146131:7762515]; 2026-01-10T12:18:12.993830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:13.014428Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:13.122197Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:13.122297Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:13.127029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:13.156157Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:13.267506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:13.306862Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:13.306885Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:13.306894Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:13.306968Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.858881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.869170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.891251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.981019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.078144Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:14.175007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.255105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.757617Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174290667785:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.757728Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.758256Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706174290667795:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.758311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.844207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.881511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.918341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.952381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.998106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.044675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.082218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.142794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.233027Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178585635964:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.233100Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178585635969:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.233117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.233293Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706178585635972:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.233347Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:17.237151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:17.248689Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706178585635971:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:17.322808Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706178585636024:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.990968Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706157110796767:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.991042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:19.432419Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499388, txId: 281474976710673] shutting down 2026-01-10T12:18:19.592635Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499580, txId: 281474976710675] shutting down 2026-01-10T12:18:20.548312Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047500569, txId: 281474976710677] shutting down >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> Cdc::ShouldBreakLocksOnConcurrentAddIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 22281, MsgBus: 8647 2026-01-10T12:18:15.039283Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706171498035087:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:15.039334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:15.388272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:15.422393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:15.422589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:15.477948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:15.485800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:15.571651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:15.626539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-10T12:18:15.626566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:15.626576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:15.626681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:16.053900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:16.066134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:16.123523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.265896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.459832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.540772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.461781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706184382938700:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.461896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.462372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706184382938710:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.462427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.785092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.825340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.859170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.891731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.929950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.976596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.016343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.071730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.140615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188677906885:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.140682Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.144319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188677906890:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.144355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188677906891:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.144407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.148070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:19.159689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706188677906894:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:19.223479Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706188677906945:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:20.039308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706171498035087:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.039397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c >> KqpYql::EvaluateExpr1 [GOOD] >> TPQTabletTests::UpdateConfig_1 >> KqpYql::EvaluateExpr2 >> KqpScripting::ScanQuery [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions >> KqpScripting::ScanQueryDisable >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c [GOOD] >> TPartitionTests::CommitOffsetRanges |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: 2026-01-10T12:15:31.611551Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705464684252658:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:31.622401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:32.120211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:32.131064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:32.131174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:32.284831Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705464684252501:2081] 1768047331558198 != 1768047331558201 2026-01-10T12:15:32.297673Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:32.300020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:32.304198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:32.608918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:32.608944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:32.608967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:32.609078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:32.628262Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:32.724402Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:32.748303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:32.781600Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7593705468979220636:2297] 2026-01-10T12:15:32.781874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:32.796658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:32.796720Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:32.798814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:32.798859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:32.798889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:32.799261Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:32.799329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:32.799358Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7593705468979220651:2297] in generation 1 2026-01-10T12:15:32.812478Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:32.855374Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:32.855552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:32.855624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7593705468979220653:2298] 2026-01-10T12:15:32.855643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:32.855654Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:32.855668Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:32.855867Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:32.855936Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:32.856028Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705468979220634:2499], serverId# [1:7593705468979220638:2500], sessionId# [0:0:0] 
2026-01-10T12:15:32.856067Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:32.856104Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:32.856130Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:32.856175Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:32.856315Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:32.856573Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:32.856651Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-10T12:15:32.858756Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:32.858844Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:32.858906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:32.864822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705468979220667:2516], serverId# [1:7593705468979220669:2518], sessionId# [0:0:0] 2026-01-10T12:15:32.870713Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1768047332905 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047332905 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:32.870765Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:32.870947Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:32.871059Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:32.871078Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:32.871110Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768047332905:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:32.871412Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1768047332905:281474976710657 keys extracted: 0 2026-01-10T12:15:32.871548Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:32.871644Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:32.871692Z node 1 
:TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:32.874584Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:32.875122Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:32.877774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1768047332904 2026-01-10T12:15:32.877814Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:32.877851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1768047332912 2026-01-10T12:15:32.877924Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1768047332905} 2026-01-10T12:15:32.877976Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:32.878044Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:32.878071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:32.878090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:32.878173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047332905 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7593705464684252863:2153], exec latency: 3 ms, propose latency: 6 ms 2026-01-10T12:15:32.878203Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:15:32.878258Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T ... 
: 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:402:2401], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:18:20.541759Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-10T12:18:20.541953Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-10T12:18:20.545850Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2026-01-10T12:18:20.546011Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2026-01-10T12:18:20.570334Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 4500} 2026-01-10T12:18:20.570433Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:18:20.570476Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:18:20.570556Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:402:2401], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:18:20.570619Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-10T12:18:20.570923Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:18:20.573818Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 2026-01-10T12:18:20.573895Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:18:20.623459Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:20.623557Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.623602Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:20.623649Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.623688Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:18:20.672016Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:20.672144Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.672193Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:20.672245Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.672284Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:18:20.707042Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:20.707137Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.707188Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:20.707241Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.707275Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:18:20.752554Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:20.752656Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.752708Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:20.752758Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.752802Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:18:20.775362Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:20.775435Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.775472Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:20.775513Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:20.775549Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:18:21.009732Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:8] at 72075186224037888 2026-01-10T12:18:21.009943Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2026-01-10T12:18:21.010079Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-10T12:18:21.010407Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:18:21.010743Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:18:21.010852Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:18:21.011214Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [25:1405:3076], Table: `/Root/Table` ([72057594046644480:38:3]), SessionActorId: [25:1349:3076]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1405:3076].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:18:21.011464Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [25:1399:3076], SessionActorId: [25:1349:3076], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1349:3076]. 2026-01-10T12:18:21.011927Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=25&id=NjAxODQzZTEtY2VlYjk2OTMtZGM3NmRkZmUtMWMzNGQzN2Y=, ActorId: [25:1349:3076], ActorState: ExecuteState, LegacyTraceId: 01kekxfqfc2m7mtab6yfybj2q3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1486:3076] from: [25:1399:3076] trace_id# 2026-01-10T12:18:21.012245Z node 25 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [25:1486:3076] TxId: 281474976715665. Ctx: { TraceId: 01kekxfqfc2m7mtab6yfybj2q3, Database: , SessionId: ydb://session/3?node_id=25&id=NjAxODQzZTEtY2VlYjk2OTMtZGM3NmRkZmUtMWMzNGQzN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:18:21.013023Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=25&id=NjAxODQzZTEtY2VlYjk2OTMtZGM3NmRkZmUtMWMzNGQzN2Y=, ActorId: [25:1349:3076], ActorState: ExecuteState, LegacyTraceId: 01kekxfqfc2m7mtab6yfybj2q3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:18:21.014434Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:9] at 72075186224037888 2026-01-10T12:18:21.014546Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:9] at 72075186224037888 2026-01-10T12:18:21.014819Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-10T12:18:21.019460Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:18:21.019588Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:18:21.020442Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-10T12:18:21.021003Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:18:21.021107Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-10T12:18:21.021279Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:04.000000Z 2026-01-10T12:18:21.021478Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-10T12:18:21.021677Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:18:21.021865Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:18:21.022168Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2026-01-10T12:18:21.022924Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPQTestInternal::TestPartitionedBigTest >> TPQTabletTests::UpdateConfig_2 >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPartitionTests::SetOffset >> TPQTest::TestReadRuleVersions >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TPartitionTests::ShadowPartitionCounters >> KqpScripting::SecondaryIndexes [GOOD] >> KqpPragma::Warning [GOOD] |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 16582, MsgBus: 11080 2026-01-10T12:18:05.205407Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706126401677058:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.205481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:05.524189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:05.538095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:05.538256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:05.563338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:05.634276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:05.648220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706126401677024:2081] 1768047485203688 != 1768047485203691 2026-01-10T12:18:05.858569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:05.902533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:05.902564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:05.902576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-10T12:18:05.902661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.240599Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:06.511416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:06.550833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:06.629114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:06.897679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.119562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.193096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.212650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143581548082:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.212856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.213486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706143581548092:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.213563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.621561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.664642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.702292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.766125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.795966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.846859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.901604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.955285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.060294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147876516256:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147876516261:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147876516262:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.060768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.065090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.074900Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706147876516265:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.149377Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706147876516316:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.210228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706126401677058:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.213791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:11.741367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... a/script_executions 2026-01-10T12:18:15.432724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:15.432745Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:15.432752Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:15.432858Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:15.918623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:15.926922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:15.940616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.043489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.125101Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:16.219773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.337145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.405153Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181784902064:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.405301Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.408643Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181784902074:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.408719Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.468160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.502685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.538269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.577773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.617301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.665644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.703997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.761395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.856614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181784902942:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.856706Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.856874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181784902947:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.856922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706181784902948:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.856972Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.860955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:18.873346Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706181784902951:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:18.976143Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706181784903002:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:20.088898Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706168899998337:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.088988Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:21.601695Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706194669805234:2539], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:21.604676Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTMxNmU2MTMtZThlMWUyZTYtNTk3ZTYyZWUtMmY2MDc1OTM=, ActorId: [2:7593706194669805232:2538], ActorState: ExecuteState, LegacyTraceId: 01kekxfr8c5eam72d7ad9b39p6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 168 } message: "At function: DataQueryBlocks" end_position { row: 1 column: 168 } severity: 1 issues { position { row: 1 column: 185 } message: "At function: TKiDataQueryBlock" end_position { row: 1 column: 185 } severity: 1 issues { position { row: 1 column: 208 } message: "At function: KiEffects" end_position { row: 1 column: 208 } severity: 1 issues { position { row: 1 column: 219 } message: "At function: KiWriteTable!" end_position { row: 1 column: 219 } severity: 1 issues { position { row: 1 column: 219 } message: "Cannot find table \'db.[/Root/ScriptingTest]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 219 } issue_code: 2003 severity: 1 } } } } } } tx_id# trace_id# |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPartitionTests::CommitOffsetRanges [GOOD] >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPQTabletTests::ProposeTx_Missing_Operations >> TPartitionTests::CorrectRange_Rollback >> TMeteringSink::FlushPutEventsV1 [GOOD] >> PQCountersSimple::Partition |91.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPartitionTests::SetOffset [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPartitionTests::OldPlanStep >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TPartitionTests::CorrectRange_Rollback [GOOD] >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionLevelMetrics >> TPQTest::TestPQPartialRead >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> KqpYql::ColumnTypeMismatch [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> TPQTest::DirectReadBadSessionOrPipe >> TPQTest::TestCompaction >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPQTabletTests::ProposeTx_Unknown_Partition_2 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD] >> TPartitionTests::IncorrectRange >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 1999, MsgBus: 26308 2026-01-10T12:18:09.587004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706143036491487:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:09.587028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:09.876159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:09.908119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:09.908250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:09.980348Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706143036491463:2081] 1768047489584766 != 1768047489584769 2026-01-10T12:18:09.988311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:09.992039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:10.147903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:10.147925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:10.147935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:10.148000Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:10.166570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:10.570372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:10.593343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:10.624959Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:10.666946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.805180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.966424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.043908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.210504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7593706160216362528:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.210612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.211081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706160216362538:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.211134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:13.586747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.646274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.686518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.728952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.794598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.850238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.885721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.941832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:14.046376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706164511330706:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:14.046471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:14.046739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706164511330711:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:14.046788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706164511330712:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:14.046827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:14.050781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:14.073611Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706164511330715:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:14.135346Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706164511330766:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:14.589039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706143036491487:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:14.590585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:16.514272Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706173101265705:2545], status: GENERIC_ERROR, is ... es { position { row: 2 column: 26 } message: "Member not found: _yql_partition_id" end_position { row: 2 column: 26 } severity: 1 } } } } } } } } } } tx_id# trace_id# Trying to start YDB, gRPC: 7347, MsgBus: 25818 2026-01-10T12:18:17.519692Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706179899768085:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.520155Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:17.557125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:17.624182Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706179899768059:2081] 1768047497516163 != 1768047497516166 2026-01-10T12:18:17.641351Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:17.679796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:17.679895Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:17.691888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:17.766104Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:17.766135Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:17.766144Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:17.766238Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:17.808951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:18.219390Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:18.227072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:18.239641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.304035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.478997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.543762Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:18.604348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.820269Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706192784671817:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.820383Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.820733Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706192784671827:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.820777Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.894686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.928845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.973477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.009802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.044135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.091733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.145761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.201661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.306864Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706197079639994:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.306965Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.307289Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706197079639999:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.307336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706197079640000:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.307552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.311766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:21.326136Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706197079640003:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:21.411032Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706197079640054:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:22.520274Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706179899768085:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:22.520352Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 6282, MsgBus: 10041 2026-01-10T12:18:05.970794Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706130073432556:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:05.971148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:06.240910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:06.251990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:06.252124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:06.277269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:06.380366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706130073432445:2081] 1768047485927208 != 1768047485927211 2026-01-10T12:18:06.427730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:06.435598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:06.700398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:06.700426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:06.700439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:06.700540Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:06.986092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:07.461080Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:07.527488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.708686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.862408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.942549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.990450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147253303501:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.990560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.991043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706147253303511:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:09.991088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.347012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.398129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.435393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.464718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.502485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.541308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.580419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.632581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.723940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706151548271675:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.724038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.724459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706151548271680:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.724507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706151548271681:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.724558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.728473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:10.741228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706151548271684:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:10.837406Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706151548271735:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:10.968889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706130073432556:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:10.968954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:12.545952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... ification cookie mismatch for subscription [2:7593706172124195712:2081] 1768047495371308 != 1768047495371311 2026-01-10T12:18:15.552441Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:15.552524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:15.563485Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:15.566604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:15.638451Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:15.638476Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:15.638484Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:15.638568Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:16.099070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:16.114655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:16.131832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.208997Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.370677Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:16.396911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.482643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.847692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706185009099466:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.847804Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.848089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706185009099476:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.848141Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.948846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.987497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.022597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.062608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.098833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.132239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.164197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.207249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.310766Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706189304067648:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.310868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.311258Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706189304067654:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.311300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706189304067653:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.311404Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.316286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:19.327962Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706189304067657:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:19.427751Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706189304067710:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:20.374523Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706172124195742:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.374602Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:21.203428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.248849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.295719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpYql::Discard+DiscardSelectIsOn [GOOD] >> KqpYql::Discard-DiscardSelectIsOn >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c [GOOD] >> TPQTest::TestUserInfoCompatibility >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestSeveralOwners >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a >> TPartitionTests::IncorrectRange [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount >> TPQTabletTests::ReadQuoter_ExclusiveLock >> TPartitionTests::ReserveSubDomainOutOfSpace >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] 
Test command err: Trying to start YDB, gRPC: 16685, MsgBus: 18364 2026-01-10T12:18:11.671079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706155523220426:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.671165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:11.924983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:11.956545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:11.956685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:11.986458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.030499Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.032177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706155523220390:2081] 1768047491669520 != 1768047491669523 2026-01-10T12:18:12.144947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.144992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.145007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.145100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:12.212178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.692405Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:12.727709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:12.735011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:12.825392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:12.988956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.191171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.302283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.430269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172703091444:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.430399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.431719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172703091454:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.431771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.785914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.822322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.861150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.901529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.945254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.986871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.029125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.109342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.216491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706176998059627:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.216598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.216880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706176998059632:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.216941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706176998059633:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.216992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.220825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.240199Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706176998059636:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:16.329562Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706176998059687:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:16.672257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706155523220426:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:16.672346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
<main>: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, ... R WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:19.195533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:19.195608Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:19.232170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:19.581213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:19.599076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.667587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.794363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.854361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.991027Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:22.328530Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199050029590:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.328621Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.336224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199050029600:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.336331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.409839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.451767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.486166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.521479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.555768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.594234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.641462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.695461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.782257Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199050030470:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.782346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.782483Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199050030475:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.782510Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199050030476:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.782749Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.786317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:22.799021Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706199050030479:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:22.857336Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706199050030530:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:23.956565Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706181870158559:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:23.957495Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:24.486799Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706207639965427:2536], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-10T12:18:24.487237Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NTQxYWU5YWMtZDk2ZDA2NzYtNWExMmU5OTAtZGJlOGY5MGI=, ActorId: [2:7593706207639965419:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxfv2a23fw3r67fw9ydgrd, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 26 } message: "At function: KiWriteTable!" end_position { row: 5 column: 26 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert type: Struct<\'Key\':Uint64,\'Value\':Uint64> to Struct<\'Key\':Uint64?,\'Value\':String?>" end_position { row: 6 column: 27 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert \'Value\': Uint64 to Optional" end_position { row: 6 column: 27 } severity: 1 } } issues { position { row: 6 column: 27 } message: "Failed to convert input columns types to scheme types" end_position { row: 6 column: 27 } issue_code: 2031 severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPQTest::DirectReadBadSessionOrPipe [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch >> KqpScripting::JoinIndexLookup [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 8068, MsgBus: 11790 2026-01-10T12:18:11.570230Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706154840470608:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.570681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:11.870581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:11.870698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:11.881359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:11.885401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:11.898914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:11.984555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:11.984591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:11.984601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:11.984696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:12.176014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.514824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:12.525105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:12.580974Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:12.605124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:12.816015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.027630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.139606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.158307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172020341616:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.158438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.158801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172020341626:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.158844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.551768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.585287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.616827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.696555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.728312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.768640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.808329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.854229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.944007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172020342498:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.944109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.944392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172020342503:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.944408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706172020342504:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.944460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.948317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:15.962346Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706172020342507:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:16.033724Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706176315309854:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:16.572188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706154840470608:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:16.572282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:18.299610Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047498315, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 29202, MsgBus: 9951 2026-01-10T12:18:19.263081Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsA ... _undelivered_message; 2026-01-10T12:18:19.273500Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:19.338710Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706186531474583:2081] 1768047499255178 != 1768047499255181 2026-01-10T12:18:19.364863Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:19.373213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:19.397973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:19.398063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:19.406936Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:19.473994Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:19.474036Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:19.474045Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:19.474139Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:19.608224Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:19.913208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:19.921457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:19.927892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.019972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.171746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.260402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.264099Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:22.855555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199416378350:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.855648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.855999Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706199416378360:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.856064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.941825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.975465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.007233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.044842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.075829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.109336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.147944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.200358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.312355Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706203711346530:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.312440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.312617Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706203711346535:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.312655Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706203711346536:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.312883Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.316006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:23.327140Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706203711346539:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:23.421455Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706203711346590:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:24.259698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706186531474698:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.259756Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:25.273782Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047505301, txId: 281474976710673] shutting down 2026-01-10T12:18:25.450458Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047505469, txId: 281474976710675] shutting down |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] Test command err: 2026-01-10T12:18:22.755149Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:22.838963Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:22.843677Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:22.844158Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:22.844234Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:22.844315Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:22.844378Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:22.844462Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:22.844533Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:22.894798Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-10T12:18:22.895000Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:22.916806Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:22.920322Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:22.920537Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:22.921607Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:22.921852Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:22.922378Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:22.922924Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-10T12:18:22.924049Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:22.924135Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:22.924207Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-10T12:18:22.924273Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:22.924379Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:22.925044Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:22.925099Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:22.925140Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:22.925222Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:22.925261Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:22.925317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:22.925411Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:22.925461Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:22.925502Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:22.925544Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:22.925606Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:22.925869Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:22.925955Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:22.926156Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:22.926368Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:22.929283Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:22.929426Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:22.929503Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:22.929553Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:22.929602Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:22.929654Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:22.929749Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:22.929810Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:22.930254Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-10T12:18:22.930974Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-10T12:18:22.931915Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-10T12:18:22.932003Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-10T12:18:22.932121Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-10T12:18:22.932191Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-10T12:18:22.932244Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-10T12:18:22.932291Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-10T12:18:22.932366Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-10T12:18:22.932425Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-10T12:18:22.932590Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 179 RawX2: 
4294969488 } Partitions { } 2026-01-10T12:18:22.932723Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:22.935119Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:22.935190Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-10T12:18:22.935252Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-10T12:18:22.935295Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-10T12:18:22.939473Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 179 RawX2: 4294969488 } } St ... partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.072440Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.072484Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.072519Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.093050Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.103766Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.103853Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.103888Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.103930Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.103965Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.124587Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.124657Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.124695Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.124737Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.124786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.135079Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.155985Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-10T12:18:27.166719Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.166795Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.166833Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.166886Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.166919Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.167438Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:382:2327], now have 1 active actors on pipe 2026-01-10T12:18:27.167876Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletProducer: 22226 ReadSet: "\010\002" Seqno: 0 2026-01-10T12:18:27.167934Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22226 2026-01-10T12:18:27.167986Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-10T12:18:27.168030Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 5/5 2026-01-10T12:18:27.168094Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-10T12:18:27.168142Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-10T12:18:27.168212Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-10T12:18:27.168259Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-10T12:18:27.168362Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-10T12:18:27.168412Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-10T12:18:27.168502Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-10T12:18:27.168554Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.168640Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.168681Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.168745Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:27.168827Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-10T12:18:27.168869Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:27.168911Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: 
[72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:27.168959Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.169176Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:27.173132Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:27.173272Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:27.173330Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.173372Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:27.173417Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.173457Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:27.173494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.173544Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:27.173658Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-10T12:18:27.173709Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:18:27.173749Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:18:27.173796Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:18:27.173839Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:18:27.173890Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-10T12:18:27.173935Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-10T12:18:27.173974Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-10T12:18:27.174049Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-10T12:18:27.174109Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:27.174156Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:27.174193Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 
72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:27.174230Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:27.174269Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:27.174305Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-10T12:18:27.174346Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-10T12:18:27.174393Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-10T12:18:27.174445Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:27.174477Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-10T12:18:27.174629Z node 6 :PQ_TX INFO: pq_impl.cpp:3459: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletConsumer: 72057594037927937 Flags: 0 Seqno: 0 2026-01-10T12:18:27.174665Z node 6 :PQ_TX DEBUG: transaction.cpp:344: [TxId: 67890] Handle TEvReadSetAck txId 67890 2026-01-10T12:18:27.174710Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-10T12:18:27.174746Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-10T12:18:27.174785Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-10T12:18:27.174822Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:27.174856Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure >> GroupWriteTest::ByTableName [GOOD] >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] Test command err: 2026-01-10T12:18:23.512153Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.590697Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:23.595590Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:23.595955Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:23.596021Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:23.596120Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:23.596177Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:23.596240Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:23.596299Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:23.614553Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:23.614783Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:23.635562Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:23.638728Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:23.638878Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:23.641307Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:23.641506Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:23.641594Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:23.642188Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:23.642695Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-10T12:18:23.643735Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:23.643797Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:23.643850Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-10T12:18:23.643898Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:23.643969Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:23.644869Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:23.644927Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:23.644967Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:23.645022Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:23.645065Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:23.645109Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:23.645218Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:23.645276Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:23.645336Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:23.645374Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:23.645416Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:23.645670Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:23.645751Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:23.645920Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:23.646297Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:23.647084Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:23.647124Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-10T12:18:23.647168Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:23.647221Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:23.647267Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:23.647650Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:23.647684Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:23.647712Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:23.647749Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:23.647776Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:23.647810Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:23.647875Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-10T12:18:23.647912Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:18:23.647942Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:23.647982Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:23.648029Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:23.648299Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:23.648353Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:23.648538Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:23.648747Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:23.648985Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:23.649167Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:23.653059Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
w have 1 active actors on pipe 2026-01-10T12:18:27.354133Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.354174Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.354222Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with empty session id 2026-01-10T12:18:27.354300Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 6, Error Read prepare request with empty session id 2026-01-10T12:18:27.354333Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with empty session id Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "bad-session" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:179:2192] 2026-01-10T12:18:27.354734Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:225:2225], now have 1 active actors on pipe 2026-01-10T12:18:27.354847Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.354885Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.354931Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id bad-session 2026-01-10T12:18:27.355009Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 7, Error Read prepare request with unknown(old?) session id bad-session 2026-01-10T12:18:27.355040Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id bad-session 2026-01-10T12:18:27.355324Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:228:2227], now have 1 active actors on pipe 2026-01-10T12:18:27.355414Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.355452Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.355510Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1702: [PQ: 72057594037927937] Got cmd delete session: ClientId: "user2" SessionId: "session2" 2026-01-10T12:18:27.355550Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72057594037927937] Destroy direct read session session2 2026-01-10T12:18:27.355624Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.355659Z node 8 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:27.355699Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:27.355730Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.355762Z node 8 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:27.355835Z node 8 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user2 session is set to 0 (startOffset 0) session session2 2026-01-10T12:18:27.355867Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:27.355897Z node 8 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:27.355932Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.356150Z node 8 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:27.358687Z node 8 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:27.358782Z node 8 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:27.358829Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:27.358863Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:27.358894Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:27.358927Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:27.358959Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:27.359002Z node 8 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:27.359064Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "session2" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:179:2192] 2026-01-10T12:18:27.359471Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:234:2232], now have 1 active actors on pipe 2026-01-10T12:18:27.359597Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.359635Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.359680Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2 2026-01-10T12:18:27.359763Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 9, Error Read prepare request with unknown(old?) session id session2 2026-01-10T12:18:27.359795Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id session2 Publish read Send publish read request: Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.360170Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:237:2234], now have 1 active actors on pipe 2026-01-10T12:18:27.360268Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.360305Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.360401Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.360463Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 10, Error Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.360501Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 214\n RawX2: 34359740585\n}\nCookie: 123\nCmdPublishRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } Forget read Send forget read request: Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.360905Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:239:2236], now have 1 active actors on pipe 2026-01-10T12:18:27.360995Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:27.361030Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:27.361126Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.361190Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 11, Error Read prepare request with unknown(old?) 
session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-10T12:18:27.361223Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 214 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 214\n RawX2: 34359740585\n}\nCookie: 123\nCmdForgetRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPQTabletTests::DropTablet_And_Tx >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 13472831551454740492 2026-01-10T12:17:51.129443Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:17:51.155061Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:17:51.155136Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:17:51.157856Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:17:51.174542Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:17:51.177624Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:18:27.819459Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 
2026-01-10T12:18:27.819583Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:18:27.879278Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28435, MsgBus: 26291 2026-01-10T12:18:12.341056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706158409266130:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.341126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:12.593977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:12.622056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:12.622144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:12.722782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:12.739488Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:12.795886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:12.922803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:12.922833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:12.922858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:12.923237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:13.356914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:13.448576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:13.460800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:13.538371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.685443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.890201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:13.961825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:15.967796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706171294169834:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.967899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.968455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706171294169844:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:15.968495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.258501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.297148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.331438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.400142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.458700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.500896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.550283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.602032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.683554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706175589138022:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.683628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.683851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706175589138028:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.683884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706175589138027:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.683924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:16.687211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:16.699534Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706175589138031:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:16.809487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706175589138082:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:17.341297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706158409266130:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.341373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:19.480440Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047499491, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 2617, MsgBus: 63705 2026-01-10T12:18:20.401175Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706193221917747:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.401225Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:20.416050Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:20.511579Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:20.514756Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706193221917719:2081] 1768047500400399 != 1768047500400402 2026-01-10T12:18:20.525548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:20.525637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:20.552430Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:20.553669Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:20.610668Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:20.610688Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:20.610696Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:20.610775Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:20.804898Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:21.043673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:21.058496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.122976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.334775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.408910Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:21.416530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.048956Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706210401788783:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.049037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.049289Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706210401788793:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.049331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.106775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.139798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.179806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.210241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.251431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.296876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.350017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.427029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.508229Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706210401789664:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.508346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.508437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706210401789669:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.508750Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706210401789671:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.508799Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.513999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:24.526173Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706210401789672:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:24.607345Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706210401789724:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:25.408192Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706193221917747:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:25.408271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTabletTests::DropTablet_Before_Write >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> KqpYql::EvaluateExpr2 [GOOD] >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit [GOOD] >> TPartitionTests::CorrectRange_Commit >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] Test command err: 2026-01-10T12:18:26.164092Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.247545Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:26.251543Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:26.251874Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:26.251942Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:26.252005Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:26.252056Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:26.252153Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.252221Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:26.271872Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:26.272134Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:26.293139Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:26.296279Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:26.296475Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.298161Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:26.298297Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.298357Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.298788Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:26.299116Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-10T12:18:26.299826Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:26.300181Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:26.300232Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-10T12:18:26.300296Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:26.300367Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:26.300942Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:26.301010Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:26.301055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.301095Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:26.301127Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.301172Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.301279Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:26.301327Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:26.301368Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:26.301402Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:26.301445Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:26.301692Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.301758Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:26.301928Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:26.302212Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:26.302866Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:26.302908Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-10T12:18:26.302939Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:26.302971Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:26.303017Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:26.303268Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:26.303291Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:26.303312Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.303340Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:26.303361Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.303385Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.303431Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-10T12:18:26.303471Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:18:26.303517Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:26.303548Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:26.303628Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:26.303774Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.303837Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:26.304014Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:26.305562Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:26.305778Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:26.306402Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:26.312937Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.738981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.739008Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:28.739230Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-10T12:18:28.739267Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-10T12:18:28.739298Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-10T12:18:28.739339Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-10T12:18:28.739380Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-10T12:18:28.739416Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-10T12:18:28.739450Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:18:28.739483Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-10T12:18:28.739593Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-10T12:18:28.739635Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-10T12:18:28.739684Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 72057594037950158 tx 67890 2026-01-10T12:18:28.739788Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-10T12:18:28.739938Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:28.740989Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-10T12:18:28.741061Z node 6 :PQ_TX INFO: pq_impl.cpp:2842: [PQ: 72057594037927937] Assume tablet 72057594037950158 dead, sending read set acks for tx 67890 2026-01-10T12:18:28.741108Z node 6 :PQ_TX DEBUG: transaction.cpp:358: [TxId: 67890] Predicate acks 1/1 2026-01-10T12:18:28.741177Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-10T12:18:28.741229Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-10T12:18:28.741287Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-10T12:18:28.741334Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-10T12:18:28.741381Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-10T12:18:28.741515Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-10T12:18:28.741565Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 72057594037950158 2026-01-10T12:18:28.741688Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:237:2230], now have 1 active actors on pipe 2026-01-10T12:18:28.741751Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 72057594037950158 2026-01-10T12:18:28.741865Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 72057594037950158 TabletDest: 72057594037927937 TabletProducer: 72057594037950158 ReadSet: "\010\001" Seqno: 0 2026-01-10T12:18:28.741927Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 72057594037950158 2026-01-10T12:18:28.741971Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-10T12:18:28.742023Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-10T12:18:28.742068Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-10T12:18:28.742115Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-10T12:18:28.742157Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-10T12:18:28.742205Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-10T12:18:28.742303Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-10T12:18:28.742343Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-10T12:18:28.742562Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-10T12:18:28.742612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:18:28.744176Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:28.744244Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:28.744288Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:28.744321Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.744362Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.744414Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:28.744507Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-10T12:18:28.744556Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-10T12:18:28.744608Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.744638Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:28.744676Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:28.744833Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.744891Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:28.746881Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:28.747017Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:28.747064Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:28.747094Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.747125Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.747160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.747189Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:28.747230Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.747303Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-10T12:18:28.747366Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:18:28.747408Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:18:28.747451Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:18:28.747503Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:18:28.747547Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-10T12:18:28.747591Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-10T12:18:28.747634Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-10T12:18:28.747710Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-10T12:18:28.747773Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 72057594037950158 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:28.747812Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-10T12:18:28.747850Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-10T12:18:28.747888Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-10T12:18:28.747912Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:28.747942Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-10T12:18:28.747976Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for 
deletion 2026-01-10T12:18:28.748012Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-10T12:18:28.748051Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-10T12:18:28.748140Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:28.749789Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:28.749858Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-10T12:18:28.749901Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-10T12:18:28.749940Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPQTest::TestCmdReadWithLastOffset >> TPartitionTests::GetPartitionWriteInfoError >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPQTabletTests::Huge_ProposeTransacton >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit >> KqpScripting::ScanQueryDisable [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2026-01-10T12:18:28.691273Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.762871Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:28.762958Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:28.763026Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:28.763114Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:28.778763Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:28.779126Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:28.779539Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-10T12:18:28.780481Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:28.780541Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 
2026-01-10T12:18:28.780585Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] 2026-01-10T12:18:28.780643Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:28.780712Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:28.781231Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-10T12:18:28.781285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:28.781337Z node 1 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ChangeConfig]) 2026-01-10T12:18:28.781389Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.781421Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.781463Z node 1 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ChangeConfig]) 2026-01-10T12:18:28.781525Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.781557Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:28.781611Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:28.781868Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:28.781932Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:28.782060Z node 1 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2026-01-10T12:18:28.782150Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|6ab7b3aa-58605708-7f32afa-8cc53b3c_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-10T12:18:28.782202Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:18:28.782376Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 2026-01-10T12:18:28.782528Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:28.782687Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:28.782760Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:28.782813Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:28.782855Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:28.782913Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.782974Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.783016Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:28.783060Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.783089Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:28.783126Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:28.783181Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2026-01-10T12:18:28.783236Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.783479Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:18:28.783557Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 24. Cookie: 1 2026-01-10T12:18:28.783624Z node 1 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2026-01-10T12:18:28.783661Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:28.783690Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:28.783745Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.783797Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.783833Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:28.783915Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2026-01-10T12:18:28.784747Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 120 count 1 nextOffset 101 batches 1 2026-01-10T12:18:28.784817Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.784861Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:28.784907Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:28.785359Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000? size 106 WTime 127 2026-01-10T12:18:28.785642Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:28.806682Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.837570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:28.837638Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:28.837727Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 24 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:28.837781Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. 
Partition: 1 2026-01-10T12:18:28.837843Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2026-01-10T12:18:28.838098Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:28.838137Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:28.838174Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.838211Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.838253Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.838285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partit ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 
Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 
556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 
Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit [GOOD] >> TPartitionTests::GetUsedStorage >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] Test command err: 2026-01-10T12:18:25.178635Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.258543Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 
2026-01-10T12:18:25.264731Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:25.265107Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.265169Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.265259Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:25.265304Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:25.265348Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.265415Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.284724Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:25.284946Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:25.305395Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.308316Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.308471Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.309470Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.309644Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:25.310049Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:25.310532Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-10T12:18:25.311567Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:25.311620Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:25.311692Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-10T12:18:25.311745Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:25.311816Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:25.312466Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:25.312519Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.312563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:25.312612Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:25.312653Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.312731Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:25.312826Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:25.312866Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:25.312915Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:25.312958Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:25.312995Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:25.313235Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:25.313306Z node 1 :PERSQUEUE DEBUG: 
partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:25.313471Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:25.313723Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:25.316599Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:25.316715Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:25.316764Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.316803Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.316862Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.316904Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.316943Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:25.317011Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:25.317389Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:195:2201], now have 1 active actors on pipe 2026-01-10T12:18:25.318058Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:198:2203], now have 1 active actors on pipe 2026-01-10T12:18:25.318201Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-10T12:18:25.318244Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-10T12:18:25.318340Z node 1 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100000} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-10T12:18:25.318482Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:25.320497Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:25.321286Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:25.321635Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:25.321883Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100000} [1:204:2142] 2026-01-10T12:18:25.322719Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 
2026-01-10T12:18:25.324045Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:18:25.324343Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:18:25.324468Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100000 to M0000100001 2026-01-10T12:18:25.324766Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:18:25.324856Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From D0000100000 to D0000100001 2026-01-10T12:18:25.325105Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataStep 2026-01-10T ... 72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:29.959058Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:29.959090Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:29.959120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-10T12:18:29.959159Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:173: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Blobs compaction is stopped 2026-01-10T12:18:29.959579Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-10T12:18:29.959647Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-10T12:18:29.959696Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-10T12:18:29.959735Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-10T12:18:29.959791Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-10T12:18:29.959835Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-10T12:18:29.959867Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:29.959901Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-10T12:18:29.959943Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-10T12:18:29.959981Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-10T12:18:29.960023Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-10T12:18:29.960070Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 
2026-01-10T12:18:29.960185Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:29.962544Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-10T12:18:29.962607Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-10T12:18:29.962653Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2547: [PQ: 72057594037927937] GetOwnership request for the next Kafka transaction while previous is being deleted. Saving it till the complete delete of the previous tx.%01 2026-01-10T12:18:29.962734Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:29.962779Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-10T12:18:29.962822Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-10T12:18:29.962870Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-10T12:18:29.962909Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 2026-01-10T12:18:29.962977Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-10T12:18:29.963021Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-10T12:18:29.963071Z node 6 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100001} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-10T12:18:29.963182Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:29.965238Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:29.965766Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:29.966113Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:29.966372Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100001} [6:243:2142] 2026-01-10T12:18:29.967230Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:18:29.968303Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:18:29.968563Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:18:29.968679Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From M0000100001 to M0000100002 2026-01-10T12:18:29.968911Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:18:29.968991Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From D0000100001 to D0000100002 2026-01-10T12:18:29.969167Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataStep 2026-01-10T12:18:29.969217Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:18:29.969267Z node 6 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:18:29.969315Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:18:29.969356Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:18:29.969396Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:29.969441Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Initializing completed. 2026-01-10T12:18:29.969488Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] init complete for topic 'topic' partition {0, KafkaTransactionWriteId{1, 0}, 100001} generation 2 [6:243:2142] 2026-01-10T12:18:29.969539Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:29.969590Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:29.969633Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:29.969668Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:29.969709Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:29.969743Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:29.969783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:29.969813Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:29.969883Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:29.970030Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:18:29.970222Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|e2edc4be-cb8fd626-445825d6-4681e267_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-10T12:18:29.970283Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:29.970319Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:29.970368Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:29.970402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:29.970443Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:29.970516Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:29.970554Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-10T12:18:29.970594Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:29.970651Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-10T12:18:29.970734Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 5 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort >> TPartitionTests::DataTxCalcPredicateError >> TPQTest::TestWriteSplit >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr2 [GOOD] Test command err: Trying to start YDB, gRPC: 7514, MsgBus: 5613 2026-01-10T12:18:16.366267Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706174991839600:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:16.366378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:16.753059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:16.774211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:16.774831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:16.870933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:16.871826Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706174991839434:2081] 1768047496281616 != 1768047496281619 2026-01-10T12:18:16.880855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:16.969263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:16.990740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:16.990763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:16.990771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:16.990891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:17.414681Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:17.453795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:17.511779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.680558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.844534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.927507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.937325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706187876743194:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.937438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.941226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706187876743204:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.941318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.286217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.323894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.356217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.389057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.424051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.486709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.529037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.617074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.714807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706192171711376:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.714898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.715233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706192171711382:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.715277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706192171711381:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.715307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.719840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:20.736658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706192171711385:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:20.815854Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706192171711436:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:21.365419Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706174991839600:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:21.365492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18908, MsgBus: 21167 2026-01-10T12:18:23.429079Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706206047113846:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:23.429339Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:23.472890Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:23.566812Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706206047113669:2081] 1768047503415098 != 1768047503415101 2026-01-10T12:18:23.574377Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:23.584964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:23.585041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:23.588172Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:23.664030Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:23.664055Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:23.664065Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:23.664189Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:23.755877Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:24.061570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:18:24.077880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.164754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.302276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.369170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.495847Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:26.685998Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706218932017426:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.686073Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.686320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706218932017435:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.686361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.754047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.815533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.846512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.876618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.911746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.944837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.978268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.024037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.100286Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706223226985601:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.100399Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.100465Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706223226985606:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.100628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706223226985608:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.100690Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.104020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:27.117737Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706223226985610:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:27.177838Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706223226985661:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:28.428455Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706206047113846:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:28.428536Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:14:32.206611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:14:32.206759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:32.206799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:14:32.206832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:14:32.206871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:14:32.206915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:14:32.206974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:14:32.207055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:14:32.207940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:32.208268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:14:32.336931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:14:32.337002Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:14:32.337851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:14:32.352954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:14:32.353437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:14:32.353607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:14:32.363131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:14:32.363498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:14:32.364255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.364753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:14:32.368051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:32.368269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:14:32.369611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:14:32.369673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:14:32.369751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:14:32.369812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:14:32.369856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:14:32.370060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.377280Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:14:32.555467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:14:32.555683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.555861Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:14:32.555911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:14:32.556170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:14:32.556248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:14:32.568994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.569370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:14:32.569721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.569791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:14:32.569834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:14:32.569875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:14:32.574009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.574119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:14:32.574167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:14:32.576167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.576226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:14:32.576272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:32.576333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:14:32.580516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 
0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:14:32.582454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:14:32.582655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:14:32.583879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:14:32.584013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:14:32.584064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:32.584389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:14:32.584448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:14:32.584649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:14:32.584737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:14:32.586711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 4 2026-01-10T12:18:28.161632Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-10T12:18:28.161664Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-10T12:18:28.161695Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-10T12:18:28.161722Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-01-10T12:18:28.161750Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-10T12:18:28.163845Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.163959Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.164004Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:18:28.164042Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-10T12:18:28.164104Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-10T12:18:28.166837Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.166959Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.166999Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:18:28.167039Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-10T12:18:28.167078Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-10T12:18:28.167883Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-10T12:18:28.167982Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.168017Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:18:28.168052Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-10T12:18:28.168112Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-10T12:18:28.169555Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.169658Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-10T12:18:28.169700Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-10T12:18:28.169736Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-10T12:18:28.169775Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 3 2026-01-10T12:18:28.169854Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-10T12:18:28.173013Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:18:28.176365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:18:28.176555Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-10T12:18:28.176687Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-10T12:18:28.178640Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-10T12:18:28.178698Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-10T12:18:28.180869Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-10T12:18:28.181036Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.181083Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:3894:5611] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-10T12:18:28.182791Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-10T12:18:28.182844Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-10T12:18:28.182934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-10T12:18:28.182963Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-10T12:18:28.183029Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-10T12:18:28.183067Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-10T12:18:28.183133Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-10T12:18:28.183165Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-10T12:18:28.183231Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-10T12:18:28.183262Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-10T12:18:28.185649Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-10T12:18:28.185898Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.185946Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:3897:5614] 2026-01-10T12:18:28.186592Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-10T12:18:28.186823Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-10T12:18:28.186939Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.186975Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:3897:5614] 2026-01-10T12:18:28.187172Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-10T12:18:28.187278Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.187318Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:3897:5614] 2026-01-10T12:18:28.187521Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-10T12:18:28.187594Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.187626Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:3897:5614] 2026-01-10T12:18:28.187826Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-10T12:18:28.187862Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:3897:5614] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPartitionTests::ConflictingTxIsAborted >> TPartitionTests::GetUsedStorage [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> KqpScripting::NoAstSizeLimit [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPartitionTests::ShadowPartitionCountersRestore >> KqpYql::SelectNoAsciiValue [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort [GOOD] >> KqpScripting::Pure [GOOD] >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback >> TPartitionTests::TestNonConflictingActsBatchOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] Test command err: 2026-01-10T12:18:24.896156Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.972182Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:24.975899Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:24.976257Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.976332Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.976389Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:24.976449Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:24.976515Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.976580Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:24.993093Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:24.993340Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:25.011255Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.014117Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.014267Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.015431Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.015615Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:25.016025Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:25.016527Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-10T12:18:25.017544Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:25.017596Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:25.017634Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-10T12:18:25.017690Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:25.017757Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:25.018507Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:25.018750Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.018803Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:25.018862Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:25.018902Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.018951Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:25.019033Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:25.019071Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:25.019111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:25.019147Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:25.019193Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:25.019472Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:25.019563Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:25.019763Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:25.019975Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:25.022535Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:25.022658Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:25.022715Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.022752Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.022783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.022832Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.022865Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:25.022926Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:25.023305Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:195:2201], now have 1 active actors on pipe 2026-01-10T12:18:25.023987Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:198:2203], now have 1 active actors on pipe 2026-01-10T12:18:25.024860Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 2 Data { Immediate: false } 2026-01-10T12:18:25.024938Z node 1 :PQ_TX WARN: pq_impl.cpp:3283: [PQ: 72057594037927937] TxId 2 empty list of operations 2026-01-10T12:18:25.025015Z node 1 :PQ_TX INFO: pq_impl.cpp:4801: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-10T12:18:25.289873Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.337149Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:25.340501Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:25.340730Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.340771Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.340802Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:25.340834Z node 2 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:25.340875Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.340920Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.356608Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [2:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:25.356734Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:25.356979Z node 2 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicNam ... persist 2026-01-10T12:18:30.573437Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.573521Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.573559Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.573599Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.573633Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.596721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.596790Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.596827Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.596879Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.596913Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.617467Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.617543Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.617588Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.617625Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.617670Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.628025Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.649102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.649172Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.649211Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.649255Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.649291Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.671319Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.671383Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.671419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.671456Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.671491Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.696619Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.696685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.696738Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.696786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.696818Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.717674Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.717737Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.717774Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.717815Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
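(Editor's note, a minimal sketch.) The repeated DEBUG cycle above -- "Process user action and tx events", "Events: 0, PendingCommits: 0, PendingWrites: 0", "Process user action and tx pending commits", "Try persist" -- suggests an idle loop that drains queued user actions first, then pending commits, and only attempts a storage write when the batch produced work. The sketch below models that drain-then-persist shape in plain C++; all names (IdlePartition, ProcessIdle, dirty) are illustrative assumptions and are not YDB's actual partition actor code.

// Illustrative model of the drain-then-persist idle loop seen in the log.
// Not YDB source; every identifier here is hypothetical.
#include <cstdio>
#include <queue>
#include <string>

struct IdlePartition {
    std::queue<std::string> events;          // user actions / tx events
    std::queue<std::string> pendingCommits;  // commits waiting to be applied
    bool dirty = false;                      // whether the batch produced writes

    void ProcessIdle() {
        // Phase 1: drain user actions and tx events.
        std::printf("Process user action and tx events\n");
        while (!events.empty()) {
            std::printf("  handled event: %s\n", events.front().c_str());
            events.pop();
            dirty = true;
        }
        // Phase 2: drain pending commits.
        std::printf("Process user action and tx pending commits\n");
        while (!pendingCommits.empty()) {
            std::printf("  handled commit: %s\n", pendingCommits.front().c_str());
            pendingCommits.pop();
            dirty = true;
        }
        // Phase 3: persist only when the batch changed something,
        // mirroring the "Try persist" line that closes every cycle.
        std::printf("Try persist%s\n", dirty ? " (writing batch)" : " (nothing to write)");
        dirty = false;
    }
};

int main() {
    IdlePartition p;
    p.ProcessIdle();                  // empty cycle: Events: 0, PendingCommits: 0
    p.events.push("TEvSetClientInfo");
    p.ProcessIdle();                  // non-empty cycle ends with a write attempt
}

In the log the empty cycles repeat on a timer while the partition is in StateIdle, which is why the same four DEBUG lines recur with zero counts between real operations.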
2026-01-10T12:18:30.717850Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.738428Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.738484Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.738521Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.738552Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.738578Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.759244Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.759309Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.759354Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.759392Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.759427Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.780110Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.790778Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.790859Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.790895Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.790944Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.790979Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.811593Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.811662Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.811710Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.811763Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.811796Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.832454Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx 
events 2026-01-10T12:18:30.832521Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.832558Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.832608Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.832640Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.853250Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.853303Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.853330Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.853364Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.853397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.874039Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.874120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.874158Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.874197Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.874233Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.937430Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.937494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.937521Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.937563Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.937601Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 10387, MsgBus: 63370 2026-01-10T12:18:16.246219Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706176350771413:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:16.246402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:16.489366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:16.511944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:16.512064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:16.568725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:16.574763Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706176350771273:2081] 1768047496236579 != 1768047496236582 2026-01-10T12:18:16.592559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:16.723291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:16.723314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:16.723320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:16.723410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:16.729580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:17.188879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:17.212230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:17.248309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:17.261178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.415562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.596707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:17.671034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.562117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706189235675037:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.562247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.562526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706189235675047:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.562560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.876193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.929675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.966464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.006421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.044565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.088603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.136232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.226821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.320965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706193530643213:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.321046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.321562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706193530643218:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.321577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706193530643219:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.321630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.325240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:20.338447Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706193530643222:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:20.443359Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706193530643273:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:21.246065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706176350771413:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:21.246138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:22.611049Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047502620 ... 01-10T12:18:23.473609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:23.487386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:23.591305Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:23.593046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:23.600912Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706203651299933:2081] 1768047503464259 != 1768047503464262 2026-01-10T12:18:23.637125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:23.637191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:23.640018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:23.744724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:23.744749Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:23.744757Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:23.744842Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:23.896196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:24.212677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:24.217167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:24.231052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.302458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.460652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.503904Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:24.535437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.753791Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706216536203680:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.753888Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.754121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706216536203689:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.754160Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:26.830255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.862949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.896766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.925714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.974642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.007055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.044493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.094445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.160966Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706220831171855:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.161065Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.161160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706220831171860:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.161199Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706220831171862:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.161227Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.164157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:27.172797Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706220831171864:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:27.231722Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706220831171915:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:28.475622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706203651300073:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:28.475710Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:29.156580Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047509193, txId: 281474976710673] shutting down |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard-DiscardSelectIsOn [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 29275, MsgBus: 31372 2026-01-10T12:18:14.702033Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706166908959223:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:14.702242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:14.753957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:15.012792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:15.013691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:15.013804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:15.038921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:15.230360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:15.272197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:15.304736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2026-01-10T12:18:15.304764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:15.304776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:15.304881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:15.714316Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:15.788259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:15.800204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:15.847682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.025883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.220071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:16.309678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.365967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706184088830143:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.366094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.366575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706184088830153:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.366639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:18.762288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.804761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.844737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.873619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.919385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.952592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.985929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.029025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.117227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188383798326:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.117314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.117617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188383798332:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.117685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706188383798331:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.117756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:19.121574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:19.132702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706188383798335:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:19.196018Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706188383798386:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:19.703159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706166908959223:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:19.703223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:21.656957Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our ... 5616 2026-01-10T12:18:23.805830Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706205472946368:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:23.806057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:23.816380Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:23.910713Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:23.916168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:23.916250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:23.923931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:24.016677Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:24.016703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:24.016712Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:24.016802Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:24.067585Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:24.452086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:24.460283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:24.469350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.533855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.784368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.831650Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:24.866283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.326436Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222652817276:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.326525Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.326824Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222652817286:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.326883Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.411554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.449638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.477899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.509525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.540452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.573725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.608511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.658245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.747213Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222652818155:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.747305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.747353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222652818160:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.747475Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222652818162:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.747529Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.751485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:27.762740Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706222652818164:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:27.860730Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706222652818217:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:28.802521Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706205472946368:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:28.802593Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:29.596220Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047509627, txId: 281474976715673] shutting down 2026-01-10T12:18:30.004680Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510033, txId: 281474976715675] shutting down >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2026-01-10T12:18:24.327086Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.409853Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.409936Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.410021Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.410097Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:24.433275Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:181:2194] 2026-01-10T12:18:24.435563Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:24.000000Z 2026-01-10T12:18:24.435856Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:181:2194] 2026-01-10T12:18:24.457506Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.498854Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.519866Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.530492Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.571887Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.619965Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.641432Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.792583Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.820311Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.887008Z node 1 :PERSQUEUE WARN: partition.cpp:3781: 
[72057594037927937][Partition][0][StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:25.407577Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.467273Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.467329Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.467395Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.467452Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.484244Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:181:2194] 2026-01-10T12:18:25.486138Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:25.000000Z 2026-01-10T12:18:25.486390Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:181:2194] 2026-01-10T12:18:25.509393Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.551132Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.572101Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.583312Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.625059Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.666496Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.687333Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.222554Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.294808Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx 
info 2026-01-10T12:18:26.294877Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:26.294933Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.294998Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:26.314664Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [3:183:2196] 2026-01-10T12:18:26.316505Z node 3 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:26.000000Z 2026-01-10T12:18:26.316753Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:183:2196] 2026-01-10T12:18:26.338908Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.381420Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.403354Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.414494Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.456230Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.497556Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.528678Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\320\344\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:26.676656Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.687901Z node 3 :PERSQUEUE WARN: partition.cpp:3014: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (invalid range) Begin 4 End 2 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" ... 
foResponse 2026-01-10T12:18:30.543317Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.543361Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:30.543422Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.543467Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:30.543520Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:30.543554Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.543585Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:30.543618Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-10T12:18:30.543654Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.543693Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-10T12:18:30.543728Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Wait kv request Wait kv request 2026-01-10T12:18:30.544172Z node 5 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 1, TxId 0 2026-01-10T12:18:30.544220Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:30.544267Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-10T12:18:30.544306Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:30.544345Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2026-01-10T12:18:30.544370Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:30.544404Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:30.565042Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:30.565111Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-10T12:18:30.565192Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:30.565252Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.565293Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-10T12:18:30.565330Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.565384Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:30.565424Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-10T12:18:30.565485Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.565558Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2026-01-10T12:18:30.565598Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:30.565635Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 3 2026-01-10T12:18:30.565669Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-10T12:18:30.565703Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:30.565728Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 4 2026-01-10T12:18:30.565747Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 4 2026-01-10T12:18:30.565771Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.565798Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2026-01-10T12:18:30.565820Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-10T12:18:30.583084Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2026-01-10T12:18:30.583175Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 6 2026-01-10T12:18:30.583205Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] 
Batch completed (6) 2026-01-10T12:18:30.583260Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.583482Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 6 Got KV request Got KV request Wait tx rollback for tx 0 2026-01-10T12:18:30.583713Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:30.604309Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:30.604400Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:30.604576Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:30.604641Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.604683Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.604725Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.604763Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.604802Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.604849Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait tx committed for tx 3 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2026-01-10T12:18:30.903183Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.943743Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:30.943798Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:30.943846Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.943885Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:30.958447Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [6:183:2196] 2026-01-10T12:18:30.960276Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:30.000000Z 2026-01-10T12:18:30.960347Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [6:183:2196] 2026-01-10T12:18:30.981391Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.022774Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.043761Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.054299Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.096357Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.138509Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.169584Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 21206, MsgBus: 61423 2026-01-10T12:18:20.536674Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706192475168747:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.538272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:20.837085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:20.837219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:20.895958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:20.914686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:20.939045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:21.085674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-10T12:18:21.085726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:21.085742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:21.085856Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:21.154516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:21.550927Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:21.595864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:21.608812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:21.689239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.850954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.027706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.110818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.915227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706205360072467:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.915369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.915816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706205360072477:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.915870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.278641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.313654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.344988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.379429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.413372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.455716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.493450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.561327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.653073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706209655040649:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.653207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.653589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706209655040654:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.653649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706209655040655:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.653791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.660723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:24.674217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706209655040658:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:24.769465Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706209655040709:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:25.536916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706192475168747:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:25.536986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:26.552820Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047506582, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 23421, MsgBus: 15768 2026-01-10T12:18:27.333402Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706223192556847:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:27.333450Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:27.345499Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:27.396195Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:27.443527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:27.443613Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:27.447317Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:27.487085Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:27.487103Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:27.487111Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:27.487184Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:27.499290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:27.869274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:28.338610Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:30.427326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459566:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.427412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.427634Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459577:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.427702Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.449822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.490397Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459672:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.490476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.490651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459674:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.490685Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.519324Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459685:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.519427Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459690:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.519461Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.519626Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236077459692:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.519665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.522575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:30.531341Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706236077459693:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:18:30.589486Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706236077459745:2596] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] >> PQCountersSimple::PartitionLevelMetrics [GOOD] >> PQCountersSimple::PartitionWriteQuota >> PQCountersLabeled::Partition >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMeteringSink::UnusedStorageV1 [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 27274, MsgBus: 22317 2026-01-10T12:18:18.124732Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706184693001862:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:18.124887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:18.405626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:18.441451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:18.441584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:18.508907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706184693001753:2081] 1768047498103919 != 1768047498103922 2026-01-10T12:18:18.525069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:18.533778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:18.639093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:18.639121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:18.639134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:18.639232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:18.658332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:19.072290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:19.088349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:19.132546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:19.140667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.255281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.450045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:19.528004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.525758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706197577905511:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.525885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.526175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706197577905520:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.526204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.924965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.970807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.007327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.048042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.084149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.123409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.166795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.268791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.364001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706201872873690:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.364086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.364486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706201872873695:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.364523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706201872873696:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.364565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.369741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:22.386415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706201872873699:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:22.473718Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706201872873750:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:23.128182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706184693001862:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:23.129776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 25898, MsgBus: 4153 ... ation/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:25.224026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:25.324249Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:25.324819Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:25.324905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:25.337014Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:25.354893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:25.458703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:25.458726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:25.458734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:25.458813Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:25.569252Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:25.841631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:25.847937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:25.862601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.929577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.075725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.142189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.290910Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:28.578818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706225049839817:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.578905Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.579160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706225049839826:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.579218Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.679963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.709375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.744878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.776982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.805704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.836645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.867792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.913704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.983658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706225049840693:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.983732Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.983760Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706225049840698:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.984030Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706225049840700:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.984106Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.986834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:28.996536Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706225049840701:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:29.059536Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706229344808049:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:30.216310Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706212164936211:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:30.216383Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:30.467489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.771550Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510810, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 13086, MsgBus: 4181 2026-01-10T12:18:17.167635Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706180570885671:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.173218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:17.440219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:17.442173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:17.442293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:17.505390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:17.583783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:17.666264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:17.666296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:17.666304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:17.666409Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:17.737070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:18.173404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:18.173554Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:18.234205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.392602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.559628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.624206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.570573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706193455789245:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.570673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.571093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706193455789255:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.571179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.947922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.996445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.062069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.109416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.147258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.220487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.261071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.322601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.422251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706197750757426:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.422343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.422617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706197750757431:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.422656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706197750757432:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.422690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.426349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:21.438092Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706197750757435:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:21.534354Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706197750757486:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:22.161561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706180570885671:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:22.161625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:24.762841Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047504580, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 27663, MsgBus: 20164 2026-01-10T12:18:25.681898Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706213929055522:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:25.681941Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:25.704163Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:25.776146Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:25.820386Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:25.820472Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:25.826464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:25.876265Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:25.876294Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:25.876306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:25.876400Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:25.953918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:26.327219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:26.340667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:26.346331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.403633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.526957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.581637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.698459Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:28.946740Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706226813959242:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.946825Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.947041Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706226813959251:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.947083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.011196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.040339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.067715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.096692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.124136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.152364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.182592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.228991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.307069Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706231108927416:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.307151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.307327Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706231108927421:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.307381Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706231108927422:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.307407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.310410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:29.320425Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706231108927425:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:29.379529Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706231108927476:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:30.682881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706213929055522:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:30.682970Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 21839, MsgBus: 7895 2026-01-10T12:18:17.316563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706178786900002:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:17.316699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:17.568321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:17.590540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:17.590669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:17.658884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706178786899963:2081] 1768047497311601 != 1768047497311604 2026-01-10T12:18:17.670830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:17.676687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:17.800765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:17.800789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:17.800799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:17.800881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:17.829787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:18.266008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:18.321842Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:18.347717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.487349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.644679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:18.717667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.709793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706191671803734:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.709914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.710353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706191671803744:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:20.710401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.098815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.142882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.180310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.214753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.253280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.297590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.390805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.442807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:21.523190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706195966771915:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.523303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.523334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706195966771920:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.523509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706195966771922:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.523556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:21.527238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:21.540235Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706195966771923:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:21.596703Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706195966771975:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:22.317932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706178786900002:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:22.317996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:23.388337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.602837Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047503642, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 11561, MsgBus: 5023 2026-01-10T12:18:24.619112Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:24.619346Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:18:24.695285Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:24.700282Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706209783334628:2081] 1768047504596828 != 1768047504596831 2026-01-10T12:18:24.704693Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:24.704775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:24.732649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:24.812724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:24.812751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:24.812759Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:24.812837Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:24.825784Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:18:25.265645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:25.274849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:25.292534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.353205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.481257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.546138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.655705Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:27.958907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222668238381:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.959022Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.959699Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706222668238392:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:27.959769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.028922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.060556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.089431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.116511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.149536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.185377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.222239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.279774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.355626Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706226963206556:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.355727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.355853Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706226963206561:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.355925Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706226963206562:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.355978Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.359678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:28.370865Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706226963206565:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:28.466100Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706226963206616:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:30.091551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.877954Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510915, txId: 281474976710675] shutting down |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard-DiscardSelectIsOn [GOOD] Test command err: Trying to start YDB, gRPC: 30132, MsgBus: 29691 2026-01-10T12:18:19.418844Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706186262358793:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:19.419019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:19.648271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:19.658421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:19.658520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:19.692223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:19.731637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:19.853258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:19.853287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:19.853296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:19.853431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:19.875641Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:20.357029Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:20.419882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.424656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:20.613067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.795610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:20.876137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.803273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706199147262417:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.803408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.803823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706199147262427:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:22.803924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.132311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.164106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.196370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.235252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.265243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.301811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.340349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.404872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:23.499173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706203442230594:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.499265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.499586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706203442230599:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.499619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706203442230600:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.499754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:23.503943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:23.518941Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706203442230603:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:23.624170Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706203442230654:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:24.418493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706186262358793:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.418559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12282, MsgBus: 7770 2026-01-10T12:18:26.304313Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706218029150526:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:26.304930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:26.321245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:26.423045Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:26.444125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:26.444208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:26.447115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:26.492048Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:26.492067Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:26.492090Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:26.492151Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:26.585180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:26.834654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:26.852705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:26.908742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.035458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.089959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.313817Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:29.216538Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706230914054243:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.216640Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.216882Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706230914054252:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.216930Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.277040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.305483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.341471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.368273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.400637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.432845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.466538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.511612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:29.586246Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706230914055126:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.586327Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.586666Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706230914055131:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.586671Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706230914055132:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.586715Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:29.590458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:29.600740Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706230914055135:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:29.702885Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706230914055186:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:31.302966Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706239503990078:2534], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2026-01-10T12:18:31.304982Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=M2FiNzMyODMtMTg5YmMxNS1jYzM2ZWQ2Zi05YzhiOWY0YQ==, ActorId: [2:7593706239503990071:2530], ActorState: ExecuteState, LegacyTraceId: 01kekxg1nxejjg062hcg9mgwsb, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 13 } message: "DISCARD not supported in YDB queries" end_position { row: 2 column: 13 } issue_code: 2008 severity: 1 } } tx_id# trace_id# 2026-01-10T12:18:31.305652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706218029150526:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:31.305698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTest::DirectReadOldPipe >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TPQTest::TestCmdReadWithLastOffset [GOOD] >> TPQTest::TestAccountReadQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] Test command err: 2026-01-10T12:18:30.531350Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.587070Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:30.591133Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:30.591463Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:30.591533Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:30.591599Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:30.591649Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:30.591711Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.591811Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:30.627948Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-10T12:18:30.628178Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:30.653815Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:30.657344Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:30.657522Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.658584Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:30.658780Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:30.659251Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:30.659745Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-10T12:18:30.660959Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:30.661021Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:30.661093Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-10T12:18:30.661150Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:30.661246Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:30.661911Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:30.661964Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.662011Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.662074Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:30.662122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.662170Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.662249Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:30.662294Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:30.662333Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:30.662369Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:30.662427Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.662702Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:30.662782Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:30.663020Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:30.663233Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:30.666100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:30.666235Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:30.666316Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.666393Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.666439Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.666485Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.666536Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.666588Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:30.667023Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-10T12:18:30.667724Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-10T12:18:30.668775Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-10T12:18:30.668867Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-10T12:18:30.668954Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-10T12:18:30.669009Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-10T12:18:30.669050Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-10T12:18:30.669101Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-10T12:18:30.669181Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-10T12:18:30.669252Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-10T12:18:30.669406Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 179 RawX2: 
4294969488 } Partitions { } 2026-01-10T12:18:30.669558Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:30.672281Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:30.672358Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-10T12:18:30.672418Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-10T12:18:30.672476Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-10T12:18:30.676689Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 179 RawX2: 4294969488 } } St ... 01-10T12:18:32.991905Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARED 2026-01-10T12:18:32.991951Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARED 2026-01-10T12:18:32.991992Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARED to PLANNING 2026-01-10T12:18:32.992035Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2026-01-10T12:18:32.992098Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4471: [PQ: 72057594037927937] TxQueue.size 1 2026-01-10T12:18:32.992148Z node 6 :PQ_TX INFO: pq_impl.cpp:650: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2026-01-10T12:18:32.992214Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2026-01-10T12:18:32.992268Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3791: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2026-01-10T12:18:32.992336Z node 6 :PERSQUEUE DEBUG: partition.cpp:1381: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2026-01-10T12:18:32.992401Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:32.992458Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:32.992523Z node 6 :PQ_TX DEBUG: partition.cpp:3048: [Partition][0][StateIdle] TxId 67890 affect consumer user 2026-01-10T12:18:32.992576Z node 6 :PQ_TX DEBUG: partition.cpp:1708: [Partition][0][StateIdle] The long answer to TEvTxCalcPredicate. TxId: 67890 2026-01-10T12:18:32.992618Z node 6 :PQ_TX DEBUG: partition.cpp:1711: [Partition][0][StateIdle] Send TEvTxCalcPredicateResult. 
TxId: 67890 2026-01-10T12:18:32.992682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:32.992721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:32.992761Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:32.992799Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:32.993025Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-10T12:18:32.993068Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-10T12:18:32.993106Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-10T12:18:32.993148Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-10T12:18:32.993190Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-10T12:18:32.993234Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-10T12:18:32.993281Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:18:32.993320Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-10T12:18:32.993361Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-10T12:18:32.993417Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-10T12:18:32.993461Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 tx 67890 2026-01-10T12:18:32.993568Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-10T12:18:32.993690Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:32.994663Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-10T12:18:32.994717Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 22222 2026-01-10T12:18:32.995769Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:239:2232], now have 1 active actors on pipe 2026-01-10T12:18:32.997121Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:241:2234], now have 1 active actors on pipe 2026-01-10T12:18:32.997373Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\002" Seqno: 0 2026-01-10T12:18:32.997423Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22222 2026-01-10T12:18:32.997464Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-10T12:18:32.997507Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-10T12:18:32.997552Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-10T12:18:32.997591Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-10T12:18:32.997633Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-10T12:18:32.997680Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-10T12:18:32.997782Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-10T12:18:32.997859Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-10T12:18:32.997924Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 22222 2026-01-10T12:18:32.998113Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-10T12:18:32.998159Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:18:32.998281Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:32.998354Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:32.998403Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:32.998444Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:32.998483Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:32.998545Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:32.998642Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-10T12:18:32.998684Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:32.998719Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:32.998763Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:32.998918Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:32.999006Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:33.004650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:33.004792Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:33.004840Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:33.004874Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.004903Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.004936Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.004966Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:33.005006Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.005083Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-10T12:18:33.005144Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:18:33.005191Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:18:33.005239Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:18:33.005290Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-10T12:18:33.005336Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-10T12:18:33.005382Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-10T12:18:33.005426Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-10T12:18:33.005506Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-10T12:18:33.005564Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:33.005615Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-10T12:18:33.005657Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-10T12:18:33.005709Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 2026-01-10T12:18:33.005750Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:33.005786Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> 
TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> TPartitionTests::DataTxCalcPredicateOrder >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TPartitionTests::ConflictingCommitFails |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts [GOOD] >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] Test command err: 2026-01-10T12:18:28.618377Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.694662Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:28.698595Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] 
Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:28.698957Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:28.699037Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:28.699091Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:28.699146Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:28.699220Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:28.699305Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:28.717268Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-10T12:18:28.717492Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:28.733466Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:28.736019Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:28.736157Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:28.737578Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" 
YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:28.737719Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:28.737788Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:28.738182Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:28.738660Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-10T12:18:28.739437Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:28.739476Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-10T12:18:28.739535Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-10T12:18:28.739590Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:28.739648Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:28.740912Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:28.740960Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:28.740994Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:28.741045Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.741090Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.741139Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:28.741194Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:28.741246Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:28.741298Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.741333Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:28.741370Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:28.741602Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:28.741706Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:28.741890Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:28.742211Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:28.742935Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:28.742968Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-10T12:18:28.743005Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:28.743041Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:28.743075Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:28.743419Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:28.743453Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:28.743482Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:28.743525Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.743550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.743577Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:28.743619Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-10T12:18:28.743653Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:18:28.743702Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.743732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:28.743773Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:28.743907Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:28.743975Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:28.744155Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:28.744400Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.744632Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:28.744817Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.748199Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... :18:33.690130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:33.690651Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Add new write blob: topic 'topic' partition {0, KafkaTransactionWriteId{2, 0}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000? size 64 WTime 1800333 2026-01-10T12:18:33.690899Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:33.691043Z node 6 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100001 offset 0 partNo 0 count 1 size 64 2026-01-10T12:18:33.694794Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 100001 offset 0 count 1 size 64 actorID [6:138:2142] 2026-01-10T12:18:33.694951Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 100001 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-10T12:18:33.695009Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:33.695087Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 11 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:33.695150Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {0, KafkaTransactionWriteId{2, 0}, 100001} 2026-01-10T12:18:33.695216Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Answering for message sourceid: '2', Topic: 'topic', Partition: {0, KafkaTransactionWriteId{2, 0}, 100001}, SeqNo: 0, partNo: 0, Offset: 0 is stored on disk 2026-01-10T12:18:33.695404Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:33.695447Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.695490Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.695532Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.695567Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:33.695614Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.695684Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 128 CachedBlobs 2 2026-01-10T12:18:33.695739Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2026-01-10T12:18:33.696504Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-10T12:18:33.696562Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-10T12:18:33.697045Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-10T12:18:33.697675Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:33.697740Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-10T12:18:33.700432Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 0 count 1 actorID [6:138:2142] 2026-01-10T12:18:33.700597Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 100000 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-10T12:18:33.700668Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:33.700818Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.700868Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:18:33.700915Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.700980Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.701048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.701088Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-10T12:18:33.701136Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.701522Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 64 CachedBlobs 1 2026-01-10T12:18:33.701660Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-10T12:18:33.701718Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-10T12:18:33.701765Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-10T12:18:33.701885Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:33.704043Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:33.725046Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.756044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:33.756137Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.756183Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.756239Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.756301Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:33.862974Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:34.130837Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:34.130921Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:34.130966Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:34.131020Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:34.131057Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:34.151813Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:18:34.182618Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:34.203388Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:34.203452Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-10T12:18:34.203479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:34.203508Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:34.203537Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:34.203616Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:34.214161Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:34.214225Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:34.214248Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:34.214283Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:34.214306Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-10T12:18:34.224591Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpScripting::ScriptStats [GOOD] >> TPartitionTests::EndWriteTimestamp_DataKeysBody ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 8382, MsgBus: 12488 2026-01-10T12:18:06.920853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706132759161578:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:06.921004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:07.426328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:07.426469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:07.469197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:07.500446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:07.504911Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:07.515974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7593706132759161351:2081] 1768047486856309 != 1768047486856312 2026-01-10T12:18:07.665542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:07.665574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:07.665589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:07.665696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:07.712193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:07.911282Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:08.208533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:08.232426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:08.294759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.471450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.660635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.740294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:10.750728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149939032403:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.750838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.751143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706149939032413:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:10.751187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.140739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.176623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.234632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.270863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.312308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.344572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.384398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.436857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.518575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706154234000582:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.518674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.518855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706154234000588:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.518895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706154234000587:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.518942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.522643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:11.533528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706154234000591:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:11.622434Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706154234000642:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:11.912268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706132759161578:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:11.912346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:13.899303Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047493870, ... iscarding snapshot; our snapshot: [step: 1768047510502, txId: 281474976715708] shutting down 2026-01-10T12:18:30.505675Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510502, txId: 281474976715707] shutting down 2026-01-10T12:18:30.549466Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=NTU0MmYxZWEtNzQ3ZTA2ZmQtYThkZmQyOWUtZjMwY2RhM2Y= } 2026-01-10T12:18:30.646961Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 97ms, session id ydb://session/3?node_id=2&id=YTQ5ZDNmODAtODc4MWUzMmMtMWNmODlkZmQtYjMwMGQ1N2M= } 2026-01-10T12:18:30.662728Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510684, txId: 281474976715712] shutting down 2026-01-10T12:18:30.670171Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510691, txId: 281474976715711] shutting down 2026-01-10T12:18:30.763801Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=2&id=NzkwODdkMTQtYzMzZTAwYmEtZGNkNmFkMTMtNmIwOGY5NWE= } 2026-01-10T12:18:30.855822Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=NjE4Y2YwODEtYTY4OTQ2OC01ZmUxMDcxYi05NjJjNWI4 } 2026-01-10T12:18:30.863508Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510894, txId: 281474976715715] shutting down 2026-01-10T12:18:30.890809Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047510887, txId: 281474976715716] shutting down 2026-01-10T12:18:30.979920Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 106ms, session id ydb://session/3?node_id=2&id=NzVhNzRhNzctOTQ1MTBjMTUtODcwYzBiNTEtZjI4YmZhNzY= } 2026-01-10T12:18:31.031698Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511062, txId: 281474976715719] shutting down 2026-01-10T12:18:31.077251Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=ZTRmMzIwYzMtZjU5ZGM4YmEtNWNhZDBmYTktMzlhYmY0ZjI= } 2026-01-10T12:18:31.160578Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511188, txId: 281474976715722] shutting down 2026-01-10T12:18:31.161102Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511188, txId: 281474976715721] shutting down 2026-01-10T12:18:31.297449Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=MzRkZmNkYi1jMjY3YjFjZC0yZjJjYzFiOS1lODFiOWExMg== } 2026-01-10T12:18:31.385325Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511412, txId: 281474976715725] shutting down 2026-01-10T12:18:31.421798Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 118ms, session id ydb://session/3?node_id=2&id=OGJhYThkNzQtNzAyZjAxYmItYzJhZDZhNzQtZmViZWUzMDk= } 2026-01-10T12:18:31.532864Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511559, txId: 281474976715728] shutting down 2026-01-10T12:18:31.533852Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511559, txId: 281474976715727] shutting down 2026-01-10T12:18:31.668143Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 124ms, session id ydb://session/3?node_id=2&id=NDZmMTIzYmYtNzlhMzk0OWEtMjdlZjFmOWMtNzhiZDM1NWQ= } 2026-01-10T12:18:31.755198Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511783, txId: 281474976715731] shutting down 2026-01-10T12:18:31.798890Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=NjQ5ZDYxYjYtNjU2MGNjNTQtMzMxNTIyM2EtODI2M2EwMWI= } 2026-01-10T12:18:31.913536Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511937, txId: 281474976715733] shutting down 2026-01-10T12:18:31.913920Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047511937, txId: 281474976715734] shutting down 2026-01-10T12:18:32.080432Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 133ms, session id ydb://session/3?node_id=2&id=YzBiM2FiZTctYTFiOTEwNmUtY2I3NzIxODQtZGFhNzE5Mjc= } 2026-01-10T12:18:32.136151Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512168, txId: 281474976715737] shutting down 2026-01-10T12:18:32.202159Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 136ms, session id ydb://session/3?node_id=2&id=NGMxZmFiODItOGYzYTU1MTgtM2ZkNGY1N2UtY2M2YWQ3ZTU= } 2026-01-10T12:18:32.253653Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512287, txId: 281474976715739] shutting down 2026-01-10T12:18:32.396806Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 139ms, session id ydb://session/3?node_id=2&id=MjBhNTk2NjAtZGIyZTM3Ni00Nzc5OWJlYi1iMzIwNDBhZA== } 2026-01-10T12:18:32.421709Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512448, txId: 281474976715741] shutting down 2026-01-10T12:18:32.499138Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 142ms, session id ydb://session/3?node_id=2&id=YThkOWZhM2ItYWI0MTJlN2ItYTI5ZDlmY2YtYWFjMWFmOA== } 2026-01-10T12:18:32.586493Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512602, txId: 281474976715743] shutting down 2026-01-10T12:18:32.649623Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 145ms, session id ydb://session/3?node_id=2&id=NDgyNWJkYTEtMjY2OTY0YzEtNGU4MzczNzctZDFkOWQzYjY= } 2026-01-10T12:18:32.791856Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512826, txId: 281474976715746] shutting down 2026-01-10T12:18:32.792447Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047512826, txId: 281474976715745] shutting down 2026-01-10T12:18:32.801685Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 148ms, session id ydb://session/3?node_id=2&id=ZTA3N2RjN2EtYzlmODNjZDItZmQ1NjY4YWMtYTc5OGEwYTM= } 2026-01-10T12:18:32.951997Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 151ms, session id ydb://session/3?node_id=2&id=YWYxMDQ3YzctMWFlYjBhMzgtZjRlMzNhMjQtMmVkMmE2OGI= } 2026-01-10T12:18:33.004513Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513036, txId: 281474976715749] shutting down 2026-01-10T12:18:33.110022Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 154ms, session id ydb://session/3?node_id=2&id=MWE5YmMxN2MtODQ0ZWZjMzAtOGVmNTg4NWItYzM3OTEzM2I= } 2026-01-10T12:18:33.173824Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513204, txId: 281474976715751] shutting down 2026-01-10T12:18:33.268472Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 157ms, session id ydb://session/3?node_id=2&id=MTA1ODk5YjktYThhYmJjNTAtOGQ0YjZlNzgtMzUwMmMwMjQ= } 2026-01-10T12:18:33.325263Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513351, txId: 281474976715753] shutting down 2026-01-10T12:18:33.482190Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 160ms, session id ydb://session/3?node_id=2&id=NDNiOTg3ODktYjg1NWVlNmQtMzExM2NhMGUtY2MzNjRjZmI= } 2026-01-10T12:18:33.487599Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513512, txId: 281474976715755] shutting down 2026-01-10T12:18:33.644350Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 163ms, session id ydb://session/3?node_id=2&id=NWFhMDM0NTAtY2I5OTQ1MDctMzk2ODAwNDktNGUyNzIzYTk= } 2026-01-10T12:18:33.689245Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513722, txId: 281474976715758] shutting down 2026-01-10T12:18:33.689871Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513722, txId: 281474976715757] shutting down |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease >> TPartitionTests::EndWriteTimestamp_FromMeta |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset >> TContinuousBackupWithRebootsTests::Basic |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 61415, MsgBus: 19499 2026-01-10T12:18:20.717070Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706192896635504:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:20.719529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:21.008548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:21.062790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-10T12:18:21.062944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:21.107367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:21.155628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:21.167589Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706192896635387:2081] 1768047500700344 != 1768047500700347 2026-01-10T12:18:21.321572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:21.348757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:21.348785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:21.348792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:21.348894Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:21.730285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:21.807726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:21.819325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:18:21.865701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.001878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.188068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:22.268479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.141831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706210076506454:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.141987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.142455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706210076506464:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.142557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.531230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.571510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.612715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.648650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.683903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.727949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.774163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.824698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:24.905930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706210076507333:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.906025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.906407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706210076507338:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.906458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706210076507339:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.906484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:24.910746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:24.921134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706210076507342:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:24.980303Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706210076507393:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:25.716184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706192896635504:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:25.716317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16009, MsgBus: 16444 2026-01-10T12:18:27.371714Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsAct ... iled to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:27.453828Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:27.455096Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706223768800997:2081] 1768047507370399 != 1768047507370402 2026-01-10T12:18:27.489222Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:27.489313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:27.492866Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:27.542898Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:27.542921Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:27.542929Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:27.543004Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:27.564797Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:27.901832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:27.917704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:27.969613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.099716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.152914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.379951Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:30.413162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236653704753:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.413245Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.413508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236653704762:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.413562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.466316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.529274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.556982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.583866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.611213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.639551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.669046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.713666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.778419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236653705633:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.778499Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236653705638:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.778554Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.778768Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706236653705641:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.778810Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:30.782017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:30.791751Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706236653705640:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:18:30.883649Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706236653705693:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:32.372188Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706223768801028:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:32.372268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:32.995375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:33.515902Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047513526, txId: 281474976715676] shutting down 2026-01-10T12:18:34.075347Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047514093, txId: 281474976715680] shutting down 2026-01-10T12:18:34.442329Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047514436, txId: 281474976715684] shutting down |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPartitionTests::TestTxBatchInFederation >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2026-01-10T12:18:24.918026Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.018197Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.018294Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.018394Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.018474Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.041966Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.070443Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:205:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:25.071585Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:213:2166] 2026-01-10T12:18:25.074732Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:213:2166] 2026-01-10T12:18:25.077169Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:214:2166] 2026-01-10T12:18:25.079312Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:214:2166] 2026-01-10T12:18:25.091656Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.092192Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ece36c5f-fc6b5ca9-b29a6111-8c566d75_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:25.100436Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.100935Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1f64a743-4ea8e0c8-b9a40b7-f77c5709_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:25.107956Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.108477Z node 1 :PERSQUEUE 
INFO: ownerinfo.cpp:30: new Cookie default|f171f69e-cb33401c-d1272019-59095abc_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:25.115608Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.116105Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b5eea8a0-e4c582a2-9a01b306-ed1a4381_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:25.118230Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.118742Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9644d20-34fbf920-5bd916ed-ef321a00_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 ACTUAL:
subsystem=SLI:
    Account=asdfgs:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    Account=total:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    sensor=Write:
        Account=asdfgs:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
subsystem=partitionWriteQuotaWait:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
subsystem=readSession:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
subsystem=readTimeLag:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=TimeLags:
                                    Interval=10000ms: 0
                                    Interval=1000ms: 0
                                    Interval=100ms: 0
                                    Interval=10ms: 0
                                    Interval=180000ms: 0
                                    Interval=2000ms: 0
                                    Interval=200ms: 0
                                    Interval=20ms: 0
                                    Interval=30000ms: 0
                                    Interval=5000ms: 0
                                    Interval=500ms: 0
                                    Interval=50ms: 0
                                    Interval=60000ms: 0
                                    Interval=9999999ms: 0
                    OriginDC=cluster:
                        Client=user:
 
...
206:2218] txId 12345 config:
CacheSize: 10485760
PartitionConfig {
  MaxCountInPartition: 20000000
  MaxSizeInPartition: 104857600
  LifetimeSeconds: 0
  LowWatermark: 6291456
  SourceIdLifetimeSeconds: 3600
  MaxWriteInflightSize: 90000000
}
PartitionIds: 0
PartitionIds: 1
TopicName: "topic"
Version: 39
LocalDC: true
Topic: "topic"
TopicPath: "/topic"
YcCloudId: "somecloud"
YcFolderId: "somefolder"
YdbDatabaseId: "PQ"
YdbDatabasePath: "/Root/PQ"
Partitions {
  PartitionId: 0
}
Partitions {
  PartitionId: 1
}
FederationAccount: "federationAccount"
MeteringMode: METERING_MODE_RESERVED_CAPACITY
AllPartitions {
  PartitionId: 0
}
AllPartitions {
  PartitionId: 1
}
Consumers {
  Name: "user"
  ReadFromTimestampsMs: 0
  Generation: 39
}
2026-01-10T12:18:35.083566Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [15:214:2166]
2026-01-10T12:18:35.084393Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [15:214:2166]
2026-01-10T12:18:35.085167Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [15:215:2166]
2026-01-10T12:18:35.085756Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [15:215:2166]
2026-01-10T12:18:35.090951Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId:  error: new GetOwnership request needed for owner 
2026-01-10T12:18:35.091268Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1878cbec-ffd60e67-1844913c-28a13db2_0 generated for partition 0 topic 'topic' owner default
2026-01-10T12:18:35.098514Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-10T12:18:35.099005Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f6912423-197efb58-1dc3bd88-29792b36_1 generated for partition 0 topic 'topic' owner default
2026-01-10T12:18:35.106335Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-10T12:18:35.106793Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2f2051eb-4d93e5f6-4cbdc70f-7e65ed90_2 generated for partition 0 topic 'topic' owner default
2026-01-10T12:18:35.113784Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-10T12:18:35.114228Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|31462028-6943a6a9-6657d026-2fc76b0d_3 generated for partition 0 topic 'topic' owner default
Got start offset = 0
COUNTERS: 
subsystem=SLI:
    Account=federationAccount:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    Account=total:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    sensor=Write:
        Account=federationAccount:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
2026-01-10T12:18:35.491751Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.555432Z node 16 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:35.555509Z node 16 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:35.555617Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:35.555694Z node 16 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:35.579866Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:35.580812Z node 16 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 40 actor [16:205:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 40 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 40 } 2026-01-10T12:18:35.581502Z node 16 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [16:214:2166] 2026-01-10T12:18:35.585363Z node 16 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [16:214:2166] 2026-01-10T12:18:35.592043Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:35.592580Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|75415c21-24034e6a-6b1f74f9-a68d8abc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-10T12:18:35.602958Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:35.603375Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bbcf059f-9eb7649a-b8389521-5fdaf1b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:35.625068Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.666490Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.690683Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.701208Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: 
No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.742437Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.783875Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.815031Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-10T12:18:35.947217Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:35.947553Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9ed06187-334c185c-29a89581-3f127f87_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:35.979037Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.030534Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-10T12:18:36.233709Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:36.234009Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|136b3abb-67d9827d-8e77d0d9-90b2425_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:36.286015Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.296627Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-10T12:18:36.490531Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:36.490959Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2ee532ab-898f278b-761b18f3-79a27146_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:36.575034Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-10T12:18:36.747281Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:36.747578Z node 
16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1270a42e-89ee3b07-bb9ca02d-3808d2c0_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:36.800160Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.882442Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 Got start offset = 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 1643, MsgBus: 29482 2026-01-10T12:18:03.325884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706119261462555:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:03.326072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:03.556149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:03.597224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:03.597508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:03.650662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:03.652453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:03.654822Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706119261462508:2081] 1768047483323308 != 1768047483323311 2026-01-10T12:18:03.729474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:03.729499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:03.729515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:03.729684Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:03.852309Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:04.189573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:04.196607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:18:04.257128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:04.349988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:04.407387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:04.580272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:04.660474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.217442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706136441333569:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:07.217561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:07.218091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706136441333579:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:07.218150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:07.634624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.674898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.711497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.741762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.780278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.824223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.904298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:07.955784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.039126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140736301750:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.039194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.039379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140736301755:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.039402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706140736301756:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.039476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:08.043103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:08.060611Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706140736301759:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:08.126953Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706140736301810:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:08.328204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706119261462555:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:08.328397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:09.712240Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=YzdiMGM4NGItZGU2YzJlN2EtNDEyZTIyMS1lZTdj ... $9))))))))))) )))) ) trace_id# 2026-01-10T12:18:34.350979Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-10T12:18:34.350997Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 2 from task: 2 with index: 0 2026-01-10T12:18:34.351007Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 3 from task: 3 with index: 0 2026-01-10T12:18:34.351017Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 4 from task: 4 with index: 0 2026-01-10T12:18:34.351029Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 5 from task: 5 with index: 0 2026-01-10T12:18:34.351037Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 6 from task: 6 with index: 0 2026-01-10T12:18:34.356513Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:291} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Execution is complete results_size# 6 trace_id# 2026-01-10T12:18:34.375090Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706239520551431:3113]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQJzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047514","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1753187599 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1753187599 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1753187599 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1753187599 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1753187599 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1753187599 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (305788205ul, \\\"New\\\");\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 305788205,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node 
Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node 
Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"3d99c364-38d4a47d-4a238463-3aa4f8c4","version":"1.0"} 2026-01-10T12:18:34.376867Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706239520551431:3113], duration: 2.196395s 2026-01-10T12:18:34.376903Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706239520551431:3113], owner: [1:7593706136441333528:2383], status: SUCCESS, issues: , uid: 3d99c364-38d4a47d-4a238463-3aa4f8c4 2026-01-10T12:18:34.377030Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706149326236877:2683], status: SUCCESS, compileActor: [1:7593706239520551431:3113] 2026-01-10T12:18:34.377185Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1753187599 + 0;\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1753187599 + 1;\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1753187599 + 2;\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1753187599 + 3;\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1753187599 + 4;\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1753187599 + 5;\n\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\n (305788205ul, \"New\");\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:18:34.377440Z node 1 :KQP_COMPILE_SERVICE DEBUG: 
kqp_compile_service.cpp:901: Send response, sender: [1:7593706149326236877:2683], queryUid: 3d99c364-38d4a47d-4a238463-3aa4f8c4, status:SUCCESS still compiling... 0 still active sessions ... 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2026-01-10T12:18:28.736888Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.820657Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:28.820745Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:28.820827Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:28.820901Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:28.840432Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:28.840689Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:28.840981Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [1:180:2193] 2026-01-10T12:18:28.841904Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:28.841963Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 
2026-01-10T12:18:28.842015Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:180:2193] 2026-01-10T12:18:28.842093Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:28.842164Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:28.842217Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process pending events. Count 0 2026-01-10T12:18:28.842255Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:28.842306Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.842365Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.842500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.842550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-10T12:18:28.842647Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:28.842789Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|6c0ae89-5655e824-7c427e90-969124d1_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-10T12:18:28.842878Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:28.842928Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:28.842976Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.843030Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.843111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:28.843191Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.843250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-10T12:18:28.843294Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-10T12:18:28.843349Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2026-01-10T12:18:28.843575Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.843768Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:18:28.843837Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:28.843874Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:28.843955Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:28.844003Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.844059Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:28.844195Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2026-01-10T12:18:28.845139Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2026-01-10T12:18:28.845252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:28.845292Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-10T12:18:28.845359Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-10T12:18:28.845903Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000? size 104 WTime 127 2026-01-10T12:18:28.846195Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:28.867190Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.898315Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:28.898420Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-10T12:18:28.898543Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:28.898647Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2026-01-10T12:18:28.898744Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2026-01-10T12:18:28.898992Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:28.899064Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.899106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.899167Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.899220Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-10T12:18:28.899285Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-10T12:18:28.909692Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.930603Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-10T12:18:28.930678Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.930730Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:28.930783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:28.930826Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-10T12:18:28.930935Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.941457Z node ... 
35.285363Z node 3 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2026-01-10T12:18:35.285430Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 12 2026-01-10T12:18:35.285492Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:35.285539Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:35.285592Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx rollback for tx 8 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "[TxId: (empty maybe), Topic: \'Root/PQ/rt3.dc1--account--topic\', Partition 0, SourceId \'src2\', SeqNo 10] MinSeqNo violation failure. SeqNo 5" } Wait tx committed for tx 12 2026-01-10T12:18:35.286321Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:35.307108Z node 3 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:35.318276Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:35.318479Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-10T12:18:35.318539Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:35.318604Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:35.318659Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:35.318710Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:35.318751Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:35.318828Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:35.762000Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.807301Z node 4 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:35.807346Z node 4 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:35.807390Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:35.807425Z node 4 :PERSQUEUE INFO: 
pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:35.822915Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [4:184:2196] 2026-01-10T12:18:35.824662Z node 4 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:35.000000Z 2026-01-10T12:18:35.824890Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:184:2196] 2026-01-10T12:18:35.835397Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.866425Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.887185Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.918243Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.949157Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:35.969806Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.021114Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.093096Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.557128Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.610601Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:36.610686Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:36.610738Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:36.610798Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:36.629211Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [5:184:2196] 2026-01-10T12:18:36.631295Z node 5 :PERSQUEUE INFO: partition_init.cpp:1016: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:18:36.631564Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:184:2196] 2026-01-10T12:18:36.642181Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.673402Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.694380Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.725806Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.757010Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.777779Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.829391Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:36.901699Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.372690Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.422795Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:37.422840Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:37.422872Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:37.422905Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:37.438771Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [6:183:2196] >>>> ADD BLOB 0 writeTimestamp=2026-01-10T12:18:37.430751Z >>>> ADD BLOB 1 writeTimestamp=2026-01-10T12:18:37.430776Z >>>> ADD BLOB 2 writeTimestamp=2026-01-10T12:18:37.430794Z >>>> ADD BLOB 3 writeTimestamp=2026-01-10T12:18:37.430808Z >>>> ADD BLOB 4 writeTimestamp=2026-01-10T12:18:37.430821Z >>>> ADD BLOB 5 writeTimestamp=2026-01-10T12:18:37.430838Z >>>> ADD BLOB 6 writeTimestamp=2026-01-10T12:18:37.430852Z >>>> ADD BLOB 7 writeTimestamp=2026-01-10T12:18:37.430868Z >>>> ADD BLOB 8 writeTimestamp=2026-01-10T12:18:37.430882Z >>>> ADD BLOB 9 writeTimestamp=2026-01-10T12:18:37.430899Z 2026-01-10T12:18:37.442426Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:37.000000Z 2026-01-10T12:18:37.442696Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [6:183:2196] 2026-01-10T12:18:37.463624Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.504827Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.525595Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.535987Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.577182Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.618467Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:37.649444Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> AssignTxId::Basic [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead |91.3%| [TA] $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] >> TableWriter::Backup [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2026-01-10T12:18:35.030946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706257248484914:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:35.033546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:35.060717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:35.347094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:35.347204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:35.353164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:35.400364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:35.442006Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:35.446108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706257248484889:2081] 1768047515029905 != 1768047515029908 2026-01-10T12:18:35.672757Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:35.689160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:35.689207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:35.689215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:35.689285Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:35.932035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:36.042764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:37.770417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706265838420373:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:37.770575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:37.771793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706265838420383:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:37.771845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:38.271815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-01-10T12:18:38.287576Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2026-01-10T12:18:38.287684Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2026-01-10T12:18:38.290903Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2026-01-10T12:18:38.290969Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:245: [controller 72075186224037888][TxInit] Execute 2026-01-10T12:18:38.291797Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:250: [controller 72075186224037888][TxInit] Complete 2026-01-10T12:18:38.291820Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:128: [controller 72075186224037888] SwitchToWork 2026-01-10T12:18:38.299572Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:171: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:19686" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-10T12:18:38.299907Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:19686" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-10T12:18:38.300010Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:18:38.300567Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2026-01-10T12:18:38.300606Z node 1 :REPLICATION_CONTROLLER INFO: tx_create_replication.cpp:68: [controller 72075186224037888][TxCreateReplication] Discover tenant nodes: tenant# /Root 2026-01-10T12:18:38.301236Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:335: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2026-01-10T12:18:38.301322Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:359: [controller 72075186224037888] Create session: nodeId# 1 2026-01-10T12:18:38.350506Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-10T12:18:38.350562Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2026-01-10T12:18:38.350801Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-10T12:18:38.350925Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-10T12:18:38.350975Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2026-01-10T12:18:38.351393Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2026-01-10T12:18:38.367303Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2026-01-10T12:18:38.367369Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2026-01-10T12:18:38.367417Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2026-01-10T12:18:38.367494Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:18:38.368187Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2026-01-10T12:18:38.368701Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-10T12:18:38.368985Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2026-01-10T12:18:38.369020Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-10T12:18:38.369073Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-10T12:18:38.369357Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2026-01-10T12:18:38.369374Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-10T12:18:38.369405Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-10T12:18:38.369624Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2026-01-10T12:18:38.369652Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-10T12:18:38.370017Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-10T12:18:38.370276Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2026-01-10T12:18:38.370324Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2026-01-10T12:18:38.370360Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: 
pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-10T12:18:38.370577Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2026-01-10T12:18:38.370610Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2026-01-10T12:18:38.370888Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2026-01-10T12:18:38.370963Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-10T12:18:38.371016Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2026-01-10T12:18:38.371057Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2026-01-10T12:18:38.371102Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2026-01-10T12:18:38.371123Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2026-01-10T12:18:38.371389Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] Test command err: 2026-01-10T12:18:24.639871Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.734769Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.734851Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.734944Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.735013Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:24.754984Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [1:181:2194] 2026-01-10T12:18:24.759706Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] 
Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:24.000000Z 2026-01-10T12:18:24.759807Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [1:181:2194] 2026-01-10T12:18:24.781832Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.823963Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.845496Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.859006Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.900922Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.942526Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.963555Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.100527Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|c1d16835-5a86dfa2-dde2f6c8-655eaf81_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-10T12:18:25.113332Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.134133Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.357607Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-10T12:18:25.403514Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-10T12:18:25.690728Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.870096Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-10T12:18:26.007244Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 4 2026-01-10T12:18:26.317727Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2026-01-10T12:18:26.628298Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 
0 disallowed 0 Send write: 6 2026-01-10T12:18:26.905595Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.043032Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-10T12:18:27.209330Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-10T12:18:27.472055Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-10T12:18:27.848390Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.272944Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.316593Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:28.316684Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:28.316740Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:28.316801Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:28.331454Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [2:184:2196] 2026-01-10T12:18:28.332935Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:28.000000Z 2026-01-10T12:18:28.333020Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:184:2196] 2026-01-10T12:18:28.343507Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.376692Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.397391Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.428367Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.460811Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.481713Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.533622Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.606069Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.680107Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|ebf7260c-5bf44eb-63e2db13-8a5e5a56_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-10T12:18:28.692390Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.878323Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:28.909510Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-10T12:18:29.149518Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-10T12:18:29.411450Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-10T12:18:29.455193Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 4 2026-01-10T12:18:29.785725Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2026-01-10T12:18:30.047905Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 
2026-01-10T12:18:30.286626Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.451392Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-10T12:18:30.547910Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-10T12:18:30.815246Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-10T12:18:31.173523Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.700727Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2: ... [StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.377707Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.398374Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.398474Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.398539Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.398590Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.398652Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.398688Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.419402Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.419493Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.419547Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.419585Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.419667Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.419712Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.440324Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.440402Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.440489Z 
node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.440528Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.440578Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.440617Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.461248Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:39.471945Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.472044Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.472128Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.472169Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.472212Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.472259Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.492898Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.493012Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.493080Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.493131Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.493193Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.493231Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.513920Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.514037Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.514107Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.514160Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.514205Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.514241Z node 5 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.534911Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.534997Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.535050Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.535090Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.535152Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.535190Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.567353Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.567449Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.567543Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.567609Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.567680Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.567717Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.567911Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-10T12:18:39.567983Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.568044Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.568118Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.568177Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.568260Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:39.568322Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-10T12:18:39.568420Z node 5 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2026-01-10T12:18:39.568485Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 
2026-01-10T12:18:39.568557Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:39.568606Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:39.568663Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-10T12:18:39.569078Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:39.600397Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:39.600499Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:39.600650Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-10T12:18:39.600714Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:39.600768Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:39.600859Z node 5 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 2 affect SourceId sourceid 2026-01-10T12:18:39.600948Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.601007Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:39.601075Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:39.601142Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:39.601204Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::ConflictingCommitFails [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] |91.3%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] |91.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2026-01-10T12:18:41.108175Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2026-01-10T12:18:29.872400Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:29.941946Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:29.942033Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:29.942108Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:29.942174Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:29.960845Z node 1 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-10T12:18:29.962587Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:29.000000Z 2026-01-10T12:18:29.962818Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-10T12:18:29.983874Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.025337Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.046076Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.056454Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.097726Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.138987Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.159704Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\374\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:30.311181Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.331953Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\374\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: 
"\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\374\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:30.701251Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.749591Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:30.749636Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:30.749685Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.749721Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:30.763689Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:181:2194] 2026-01-10T12:18:30.765476Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:30.000000Z 2026-01-10T12:18:30.765706Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:181:2194] 2026-01-10T12:18:30.786741Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.827816Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.848485Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.858855Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.899866Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.941102Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:30.961760Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\203\340\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:31.118332Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for 
SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.138988Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\203\340\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\203\340\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\203\340\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\203\340\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:31.506037Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.547331Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:31.547386Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:31.547432Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:31.547475Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:31.894198Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candida ... 
RSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:40.795072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:40.795110Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:40.795141Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:40.795177Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:40.795217Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:40.815853Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:40.816029Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-10T12:18:40.816111Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:40.816176Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:40.816230Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 0 affect SourceId src1 2026-01-10T12:18:40.816305Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:40.816347Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:40.816391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:40.816434Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:40.816476Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait 1st KV request Wait kv request 2026-01-10T12:18:40.816633Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-10T12:18:40.816681Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:40.816722Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:40.816763Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:40.816801Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:40.816852Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:40.816895Z node 6 
:PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-10T12:18:40.817098Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2026-01-10T12:18:40.817152Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-10T12:18:40.817198Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:40.817238Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:40.817283Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got batch complete: 1 Got KV request Got KV request Got KV request Wait tx committed for tx 0 2026-01-10T12:18:40.817642Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:40.828016Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:40.828125Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:40.828227Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-10T12:18:40.828280Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:40.828319Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:40.828375Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:40.828402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:40.828431Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 3, PendingWrites: 0 2026-01-10T12:18:40.828466Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:40.828523Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:40.828592Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. 
Offset: 20 2026-01-10T12:18:40.828636Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:40.828667Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 30 2026-01-10T12:18:40.828694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:40.828800Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2026-01-10T12:18:40.829755Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2026-01-10T12:18:40.829830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 3 2026-01-10T12:18:40.829871Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (3) 2026-01-10T12:18:40.829917Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:40.830411Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][0][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 1 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000? size 70 WTime 10239 2026-01-10T12:18:40.830637Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 3 Got KV request Got KV request Wait 2nd KV request Wait kv request 2026-01-10T12:18:40.830883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:40.851764Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:40.851842Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:40.851914Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:40.851970Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-10T12:18:40.852037Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 1 is already written 2026-01-10T12:18:40.852093Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-10T12:18:40.852129Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 1 is already written 2026-01-10T12:18:40.852164Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-10T12:18:40.852197Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2026-01-10T12:18:40.852403Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:40.852451Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:40.852488Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:40.852528Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:40.852564Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:40.852620Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TDatabaseResolverTests::PostgreSQL |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2026-01-10T12:18:41.161469Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. 
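The "is already written" replies above (SeqNo 8 and 10 acknowledged without a new write, SeqNo 11 stored at offset 40) illustrate per-SourceId sequence-number deduplication in the partition. The following is only a minimal, self-contained sketch of that rule as it appears in the log; the names (DedupWriter, SourceIdState) are invented for illustration and this is not the actual TPartition implementation.

```cpp
// Minimal sketch of per-SourceId seqno deduplication, as suggested by the
// "is already written" / "is stored on disk" replies in the log above.
// Hypothetical names; not the real TPartition code.
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

struct SourceIdState {
    uint64_t MaxSeqNo = 0;   // highest seqno already committed for this source id
};

enum class WriteResult { AlreadyWritten, StoredOnDisk };

class DedupWriter {
public:
    WriteResult Write(const std::string& sourceId, uint64_t seqNo, uint64_t& offset) {
        auto& state = Sources_[sourceId];
        if (seqNo <= state.MaxSeqNo) {
            // Duplicate or stale message: acknowledge it without writing again.
            return WriteResult::AlreadyWritten;
        }
        state.MaxSeqNo = seqNo;
        offset = NextOffset_++;          // assign the next partition offset
        return WriteResult::StoredOnDisk;
    }

private:
    std::unordered_map<std::string, SourceIdState> Sources_;
    uint64_t NextOffset_ = 0;
};

int main() {
    DedupWriter writer;
    uint64_t offset = 0;
    writer.Write("src1", 10, offset);            // commits seqno 10 for src1
    auto r1 = writer.Write("src1", 8, offset);   // stale seqno -> already written
    auto r2 = writer.Write("src1", 11, offset);  // new seqno   -> stored on disk
    std::cout << (r1 == WriteResult::AlreadyWritten) << " "
              << (r2 == WriteResult::StoredOnDisk) << " offset=" << offset << "\n";
}
```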
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2026-01-10T12:18:41.160244Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2026-01-10T12:18:41.467372Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2026-01-10T12:18:41.517558Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2026-01-10T12:18:41.581252Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
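The resolver errors above share two URL shapes: MDB-style engines use '/managed-<engine>/v1/clusters/<clusterId>/hosts' (Greenplum uses 'master-hosts'), while Ydb and DataStreams go through the ydbc endpoint with '?databaseId='. The helper below only reconstructs that pattern from the values visible in these messages; it is a hypothetical illustration, not the actual database_resolver.cpp logic.

```cpp
// Illustrative reconstruction of the URL patterns visible in the resolver
// errors above. Hypothetical helpers; not the real database_resolver.cpp API.
#include <iostream>
#include <string>

// Path used for MDB-style engines (clickhouse, mysql, postgresql, greenplum).
std::string MdbHostsPath(const std::string& engine, const std::string& clusterId) {
    const std::string suffix = (engine == "greenplum") ? "master-hosts" : "hosts";
    return "/managed-" + engine + "/v1/clusters/" + clusterId + "/" + suffix;
}

// Path used for Ydb and DataStreams databases via the ydbc endpoint.
std::string YdbcDatabasePath(const std::string& databaseId) {
    return "/ydbc/cloud-prod/database?databaseId=" + databaseId;
}

int main() {
    const std::string id = "etn021us5r9rhld1vgbh";
    std::cout << MdbHostsPath("clickhouse", id) << "\n";  // /managed-clickhouse/v1/clusters/<id>/hosts
    std::cout << MdbHostsPath("greenplum", id) << "\n";   // /managed-greenplum/v1/clusters/<id>/master-hosts
    std::cout << YdbcDatabasePath(id) << "\n";            // /ydbc/cloud-prod/database?databaseId=<id>
}
```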
|91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::DropTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2026-01-10T12:18:41.800357Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TPQTabletTests::DropTablet [GOOD] >> TKeyValueTest::TestWriteReadPatchRead >> KeyValueReadStorage::ReadRangeOk1Key >> TKeyValueTest::TestRewriteThenLastValue >> TPQTabletTests::Cancel_Tx >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TKeyValueTest::TestConcatWorks >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] |91.4%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [1:125:2150] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:134:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-01-10T12:17:19.666614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:19.666697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.666751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:19.666832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:19.666870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:19.666903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:19.666987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.667059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:19.667889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:19.668200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:19.790069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:17:19.790139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:17:19.791035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:188:2058] recipient: [1:15:2062] 2026-01-10T12:17:19.802139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:19.802681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:19.802861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:19.808644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:19.808840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:19.809552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.810224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:19.812664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.812860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:19.814036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:19.814106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.814340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:19.814394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:19.814440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:19.814541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.971553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.972578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.972703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.972807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.972934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... LAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [51:210:2210], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2026-01-10T12:18:41.115800Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1056 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-10T12:18:41.115925Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1056 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-10T12:18:41.116461Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:41.116512Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:18:41.117017Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 
72057594046678944, cookie: 1003 2026-01-10T12:18:41.117113Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-10T12:18:41.117148Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-10T12:18:41.117187Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:18:41.117225Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:18:41.117313Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-01-10T12:18:41.117601Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 219043334821 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:18:41.117642Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-01-10T12:18:41.117755Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 219043334821 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:18:41.117805Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:18:41.117896Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 740 RawX2: 219043334821 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:18:41.117960Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:18:41.117999Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:41.118042Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:18:41.118089Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-10T12:18:41.121485Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 
72057594046678944 2026-01-10T12:18:41.122927Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-10T12:18:41.123063Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:41.123379Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:41.123426Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-01-10T12:18:41.123528Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:18:41.123566Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:41.123607Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:18:41.123636Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:41.123673Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-10T12:18:41.123713Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:41.123750Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-10T12:18:41.123782Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-10T12:18:41.123903Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-10T12:18:41.126547Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-10T12:18:41.126599Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-10T12:18:41.126978Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-10T12:18:41.127060Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-10T12:18:41.127095Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:859:2830] TestWaitNotification: OK eventTxId 1003 2026-01-10T12:18:41.127545Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:18:41.127751Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 242us result status StatusSuccess 2026-01-10T12:18:41.128321Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPartitionTests::TestBatchingWithChangeConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2026-01-10T12:18:42.774460Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:42.779165Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-10T12:18:42.785610Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:42.785680Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-10T12:18:42.791836Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:124} Inline read request KeyValue# 1 Status# OK 2026-01-10T12:18:42.791915Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:136} Expected OK or UNKNOWN and given OK readCount# 0 2026-01-10T12:18:42.791963Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2026-01-10T12:18:23.964656Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.042986Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.043061Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.043123Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.043226Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:24.060842Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:181:2194] 2026-01-10T12:18:24.062683Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:24.000000Z 2026-01-10T12:18:24.062936Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:181:2194] 2026-01-10T12:18:24.088421Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.132557Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.153952Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.164718Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.206554Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.252353Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.274899Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.437348Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.459325Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:25.016950Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates 
for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.055856Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.055928Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.055961Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.056008Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.068155Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-10T12:18:25.068353Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:25.068611Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:180:2193] 2026-01-10T12:18:25.069403Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request 2026-01-10T12:18:25.069576Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-10T12:18:25.069689Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:18:25.069781Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-10T12:18:25.069917Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:18:25.069991Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2026-01-10T12:18:25.070156Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-10T12:18:25.070203Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-10T12:18:25.070290Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-10T12:18:25.070387Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-10T12:18:25.070514Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-10T12:18:25.070554Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:18:25.070588Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:25.000000Z 2026-01-10T12:18:25.070614Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:18:25.070679Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-10T12:18:25.070798Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:18:25.070838Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:25.070870Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:25.070901Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:180:2193] 2026-01-10T12:18:25.070941Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2026-01-10T12:18:25.070993Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:25.071024Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:25.071049Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.071106Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.071132Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.071167Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.071199Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:25.071269Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:25.071424Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:25.092199Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.123215Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:25.123288Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.123323Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:25.123363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:25.123404Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Pa ... Got KV request Wait kv request Wait tx rollback for tx 0 Wait kv request 2026-01-10T12:18:42.598563Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2026-01-10T12:18:42.598609Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:42.598660Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:42.598730Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:42.598772Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.598812Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:42.598863Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.598910Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:42.598957Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 1 2026-01-10T12:18:42.598994Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 1 2026-01-10T12:18:42.599030Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:42.599064Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:42.599105Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.599301Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Wait tx committed for tx 1 2026-01-10T12:18:42.599472Z node 6 :PERSQUEUE 
DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:42.620102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:42.630870Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:42.631040Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:42.631098Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.631142Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.631178Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.631223Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.631265Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.631320Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait for no tx committed 2026-01-10T12:18:42.651967Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.652042Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.652115Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.652156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.652197Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.672783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.672845Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.672890Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.672926Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.672955Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.683531Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:42.705065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.705134Z node 6 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.705175Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.705221Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.705270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.725853Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.725910Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.725938Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.725971Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.725998Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.746603Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.746656Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.746687Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.746718Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.746744Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.768390Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.768467Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.768506Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.768551Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.768590Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.801018Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.801089Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.801131Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.801217Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-10T12:18:42.801256Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.821951Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.822033Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.822074Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.822117Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.822157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.842799Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.842884Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.842942Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.842985Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.843022Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:42.863802Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:42.863873Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.863913Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:42.863957Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:42.864013Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2026-01-10T12:18:43.999546Z 1 00h00m00.000000s 
:KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:43.999640Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:272} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1768047523998 ErrorReason# 2026-01-10T12:18:44.007689Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:44.007784Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:242} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1768047524007 ErrorReason# 2026-01-10T12:18:44.012771Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:44.012835Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:224} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1768047524012 ErrorReason# |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> KqpWorkload::STOCK [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> Cdc::Alter [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueTest::TestVacuumOnEmptyTablet >> Cdc::DescribeStream >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2026-01-10T12:18:44.490250Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:44.492231Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2026-01-10T12:18:30.605073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706237053153485:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:30.605223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:30.637581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:30.645650Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:30.663642Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706234622262750:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:30.665494Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:30.682437Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:30.847715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:30.868739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:30.900068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:30.900214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:30.902193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:30.902316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:30.936746Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:30.970871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:30.978586Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:18:30.979500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:31.057044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:31.087047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003ae1/r3tmp/yandexSUrZ8O.tmp 2026-01-10T12:18:31.087068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003ae1/r3tmp/yandexSUrZ8O.tmp 2026-01-10T12:18:31.087214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003ae1/r3tmp/yandexSUrZ8O.tmp 2026-01-10T12:18:31.087294Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:31.135082Z INFO: TTestServer started on Port 12308 GrpcPort 18988 2026-01-10T12:18:31.156478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:18988 2026-01-10T12:18:31.356019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:31.426554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:31.613602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:31.679124Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:33.872218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706249938056748:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:33.872348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:33.872738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706249938056760:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:33.872827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:33.872959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706249938056764:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:33.876548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:33.892240Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706249938056766:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:18:33.982091Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706249938056853:3046] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:34.230304Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706247507164963:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:34.230660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:34.230805Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=N2Y0ODBhOWYtMTE1MGM3ZTQtM2ZlNWM5NGUtOTdmMzUxYWM=, ActorId: [2:7593706247507164939:2305], ActorState: ExecuteState, LegacyTraceId: 01kekxg4981mz89gcncfs4b2t0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:34.232658Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:18:34.233634Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706249938056863:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:34.233924Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTljNzI1OS0xYTZlMTU0Zi03ZjMzY2Y1NC0yNDU0NzAzMg==, ActorId: [1:7593706249938056731:2329], ActorState: ExecuteState, LegacyTraceId: 01kekxg48e5s4mhg3pv6x1btvf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:34.2 ... WAIT_RS 2026-01-10T12:18:44.122210Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-10T12:18:44.122265Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-10T12:18:44.122513Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-10T12:18:44.122566Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/2 2026-01-10T12:18:44.122643Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-10T12:18:44.122699Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:44.122743Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:44.122789Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:44.122853Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:18:44.123150Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2026-01-10T12:18:44.123194Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2026-01-10T12:18:44.123225Z node 6 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user drop done 2026-01-10T12:18:44.123273Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-10T12:18:44.123322Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:44.123367Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:44.123417Z node 6 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:44.123862Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][1][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-10T12:18:44.123898Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:44.123927Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:44.123953Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:44.123988Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:18:44.124054Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][1][StateIdle] Schedule reply tx done 67890 2026-01-10T12:18:44.124106Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:44.124133Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:44.124177Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:44.124396Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:44.124462Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:18:44.128682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:44.129036Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:44.129845Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:44.129898Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:44.129933Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:44.129962Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:44.129995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:44.130025Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:44.130070Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:44.130533Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-10T12:18:44.130596Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:18:44.130647Z node 6 :PQ_TX DEBUG: 
pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:18:44.130696Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:18:44.130749Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/2 2026-01-10T12:18:44.130795Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-10T12:18:44.131117Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:44.131290Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:44.131395Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:44.131431Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:44.131460Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:44.131489Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:44.131518Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:44.131544Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:44.131584Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:44.131719Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 1 2026-01-10T12:18:44.131752Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-10T12:18:44.131781Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-10T12:18:44.131813Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-10T12:18:44.131843Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 2/2 2026-01-10T12:18:44.131892Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-10T12:18:44.131942Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-10T12:18:44.132237Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } 
Consumers { Name: "client-1" Generation: 2 } Consumers { Name: "client-3" Generation: 2 } 2026-01-10T12:18:44.132316Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:44.132410Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-10T12:18:44.132498Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-10T12:18:44.132571Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-10T12:18:44.132629Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-10T12:18:44.132676Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-10T12:18:44.132747Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-10T12:18:44.132792Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-10T12:18:44.132834Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-10T12:18:44.132885Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-10T12:18:44.132939Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-10T12:18:44.132992Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-10T12:18:44.133085Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:18:44.135437Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:18:44.135518Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-10T12:18:44.135571Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-10T12:18:44.135628Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 1326, MsgBus: 15761 2026-01-10T12:17:36.677757Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706002138862572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:36.678517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:36.919849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:36.929615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-10T12:17:36.929742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:36.985288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:37.009590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:37.040918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706002138862544:2081] 1768047456676037 != 1768047456676040 2026-01-10T12:17:37.123345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:37.143578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:37.143613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:37.143623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:37.143739Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:37.604173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:37.693323Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:39.903642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015023765324:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.903767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.904197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706015023765334:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:39.904288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:40.170390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.296959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:40.874653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:41.269004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706023613703836:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.269122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.269419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706023613703841:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.269494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706023613703842:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.269518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:41.273729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:41.288035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706023613703845:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:17:41.398819Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706023613703896:5098] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:41.678060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706002138862572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:41.678127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:51.897197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:17:51.897235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.514596s took: 0.519091s took: 0.520487s took: 0.520932s took: 0.524315s took: 0.530597s took: 0.530778s took: 0.530763s took: 0.531672s took: 0.541060s took: 6.302557s 2026-01-10T12:18:35.734998Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711090; took: 6.317032s took: 6.327451s 2026-01-10T12:18:35.767936Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711092; 2026-01-10T12:18:35.768620Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711091; 2026-01-10T12:18:35.768801Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711090; 2026-01-10T12:18:35.769006Z node 1 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1768047515781 : 281474976711090] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-10T12:18:35.769791Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7593706255541947346:5026], Table: `/Root/stock` ([72057594046644480:38:1]), SessionActorId: [1:7593706229772142297:5026]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7593706255541947346:5026].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:18:35.770421Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706255541946587:5026], SessionActorId: [1:7593706229772142297:5026], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7593706229772142297:5026]. took: 6.341294s 2026-01-10T12:18:35.770831Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NTU4M2M1OTctZjAwYjMzY2UtZDRkNzgyZjgtY2I2ZmFkYzg=, ActorId: [1:7593706229772142297:5026], ActorState: ExecuteState, LegacyTraceId: 01kekxg0f78b8f3ejd2ss7579s, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7593706255541947290:5026] from: [1:7593706255541946587:5026] trace_id# 2026-01-10T12:18:35.770940Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706255541947290:5026] TxId: 281474976711090. Ctx: { TraceId: 01kekxg0f78b8f3ejd2ss7579s, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTU4M2M1OTctZjAwYjMzY2UtZDRkNzgyZjgtY2I2ZmFkYzg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:18:35.771235Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTU4M2M1OTctZjAwYjMzY2UtZDRkNzgyZjgtY2I2ZmFkYzg=, ActorId: [1:7593706229772142297:5026], ActorState: ExecuteState, LegacyTraceId: 01kekxg0f78b8f3ejd2ss7579s, Create QueryResponse for error on ... abletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-10T12:18:44.063512Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:18:44.063522Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2026-01-10T12:18:44.063532Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-10T12:18:44.063541Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-10T12:18:44.063551Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2026-01-10T12:18:44.063561Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-10T12:18:44.063574Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2026-01-10T12:18:44.065169Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-10T12:18:44.069791Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2026-01-10T12:18:44.069815Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-10T12:18:44.075680Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-10T12:18:44.075707Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-10T12:18:44.075721Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-10T12:18:44.076429Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2026-01-10T12:18:44.076447Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-10T12:18:44.076455Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:18:44.076465Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2026-01-10T12:18:44.076475Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037895 not found 2026-01-10T12:18:44.076677Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-10T12:18:44.076704Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-10T12:18:44.076714Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2026-01-10T12:18:44.077721Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-10T12:18:44.077778Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-10T12:18:44.080941Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-10T12:18:44.081194Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-10T12:18:44.081221Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-10T12:18:44.081361Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2026-01-10T12:18:44.081377Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2026-01-10T12:18:44.084405Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-10T12:18:44.246549Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2026-01-10T12:18:44.246590Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2026-01-10T12:18:44.246608Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2026-01-10T12:18:44.246626Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2026-01-10T12:18:44.246643Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2026-01-10T12:18:44.246658Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2026-01-10T12:18:44.246675Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2026-01-10T12:18:44.246692Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2026-01-10T12:18:44.246709Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2026-01-10T12:18:44.246725Z node 1 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2026-01-10T12:18:44.246753Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2026-01-10T12:18:44.264727Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2026-01-10T12:18:44.264768Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2026-01-10T12:18:44.264784Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2026-01-10T12:18:44.264800Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2026-01-10T12:18:44.264814Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2026-01-10T12:18:44.264871Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2026-01-10T12:18:44.264885Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2026-01-10T12:18:44.264900Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2026-01-10T12:18:44.264991Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2026-01-10T12:18:44.265013Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2026-01-10T12:18:44.265029Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2026-01-10T12:18:44.265043Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2026-01-10T12:18:44.265057Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2026-01-10T12:18:44.265111Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2026-01-10T12:18:44.265125Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2026-01-10T12:18:44.265138Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2026-01-10T12:18:44.265158Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2026-01-10T12:18:44.265180Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2026-01-10T12:18:44.265195Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037947 not found 2026-01-10T12:18:44.265209Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2026-01-10T12:18:44.265222Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2026-01-10T12:18:44.265260Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2026-01-10T12:18:44.265290Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2026-01-10T12:18:44.265310Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2026-01-10T12:18:44.265325Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2026-01-10T12:18:44.268319Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2026-01-10T12:18:44.275194Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2026-01-10T12:18:44.275232Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2026-01-10T12:18:44.275513Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease [GOOD] >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPQTest::TestCompaction [GOOD] >> TPQTest::TestDirectReadHappyWay ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [1:125:2150] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:134:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-01-10T12:17:07.716469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:07.716574Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:07.716641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:07.716683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:07.716716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:07.716744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:07.716798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:07.716863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:07.717584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:07.717773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:07.820674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:17:07.820730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:07.821373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:188:2058] recipient: [1:15:2062] 2026-01-10T12:17:07.829722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:07.830299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:07.830477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:07.836382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:07.836576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:07.837268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:07.837940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:07.840207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:07.840391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:07.841535Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:07.841600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:07.841808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:07.841860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:07.841910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:07.842005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:07.970508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:07.971869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 0T12:18:45.748983Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:45.749013Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:209:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2026-01-10T12:18:45.749049Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:209:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2026-01-10T12:18:45.749529Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:45.749570Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:18:45.749635Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:45.749664Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:18:45.749694Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-10T12:18:45.750334Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.750411Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.750446Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-10T12:18:45.750475Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:18:45.750505Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:18:45.751474Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.751556Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.751581Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 
1003 2026-01-10T12:18:45.751607Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:18:45.751636Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:18:45.751701Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-01-10T12:18:45.754206Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:18:45.754248Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:18:45.754285Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 39], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:18:45.754364Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:18:45.754391Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:45.754422Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:18:45.754449Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:45.754478Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-10T12:18:45.754511Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:18:45.754540Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-10T12:18:45.754566Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-10T12:18:45.754657Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:18:45.754688Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:18:45.755211Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:18:45.755252Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:18:45.755308Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:18:45.755519Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.755664Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-10T12:18:45.757569Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-10T12:18:45.757753Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-10T12:18:45.757789Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-10T12:18:45.758078Z node 62 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-10T12:18:45.758138Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-10T12:18:45.758167Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:877:2848] TestWaitNotification: OK eventTxId 1003 2026-01-10T12:18:45.758489Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:18:45.758647Z node 62 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 189us result status StatusSuccess 2026-01-10T12:18:45.759091Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::TestBatchingWithProposeConfig >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestGetStatusWorksNewApi >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestMessageNo >> TPQTest::TestReadSessions >> TKeyValueCollectorTest::TestKeyValueCollectorSingle |91.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestGetTimestamps >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] Test command err: 2026-01-10T12:18:23.748951Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.829727Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:23.829803Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:23.829865Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:23.829931Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:23.848568Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-10T12:18:23.850491Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:23.000000Z 2026-01-10T12:18:23.850732Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-10T12:18:23.871858Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.916308Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.938302Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.951232Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.998985Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.043691Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.064590Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\315\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.216872Z node 1 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.238002Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\315\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\315\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.312384Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 2026-01-10T12:18:24.312493Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\315\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\315\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:24.817978Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.867210Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.867266Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.867329Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.867385Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:24.878365Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:181:2194] 2026-01-10T12:18:24.880004Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:24.000000Z 2026-01-10T12:18:24.880266Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:181:2194] 2026-01-10T12:18:24.901261Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.948284Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.970156Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.980667Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.024276Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.066164Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.087102Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:25.242841Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.264863Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\200\325\337\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:25.654078Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.729062Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] 
doesn't have tx info 2026-01-10T12:18:25.729115Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.729159Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.729206Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:25.746274Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-10T12:18:25.746509Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:25.746756Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [3:182:2195] 2026-01-10T12:18:25.747710Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Go ... 026-01-10T12:18:48.951346Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:48.951376Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:48.951419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:48.951446Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:48.951501Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:48.951644Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-10T12:18:48.951678Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:48.951708Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-10T12:18:48.951745Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 26 affect SourceId src1 2026-01-10T12:18:48.951804Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:48.951829Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:48.951858Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:48.951887Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:48.951922Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:48.951971Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-10T12:18:48.952015Z node 6 :PERSQUEUE 
DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:48.952046Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:48.952065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:48.952101Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:48.952120Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:48.952135Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait for no tx committed 2026-01-10T12:18:48.952288Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2026-01-10T12:18:48.952344Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:48.952384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:48.952433Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:48.952461Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:48.952509Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-10T12:18:48.952539Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 26 2026-01-10T12:18:48.952590Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-10T12:18:48.952645Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 26 2026-01-10T12:18:48.952686Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:48.952714Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:48.952745Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-10T12:18:48.963197Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:48.983937Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.004428Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:18:49.024889Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.045464Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.076952Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.097445Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.118058Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.138685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.149079Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:49.170643Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.201986Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events Send disk status response with cookie: 0 Wait tx committed for tx 26 2026-01-10T12:18:49.202230Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:49.202370Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-10T12:18:49.202427Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.202487Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:49.202537Z node 6 :PERSQUEUE DEBUG: partition.cpp:1608: [72057594037927937][Partition][0][StateIdle] TxId (empty maybe) affect SourceId src1 2026-01-10T12:18:49.202609Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:49.202663Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.202715Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-10T12:18:49.202762Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-10T12:18:49.202829Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-10T12:18:49.202886Z node 6 :PERSQUEUE DEBUG: partition.cpp:3941: 
[72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-10T12:18:49.202935Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:49.202975Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:49.203053Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.203259Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got batch complete: 1 Got KV request Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 28 2026-01-10T12:18:49.203510Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:49.224170Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:49.224281Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:49.224399Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-10T12:18:49.224457Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.224499Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.224534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.224591Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.224643Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.224708Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: 
[1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:451:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:454:2057] recipient: [2:453:2379] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:456:2057] recipient: [2:453:2379] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:455:2380] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:571:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:451:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:454:2057] recipient: [3:453:2379] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:456:2057] recipient: [3:453:2379] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:455:2380] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:571:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:452:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:455:2057] recipient: [4:454:2379] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:457:2057] recipient: [4:454:2379] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:456:2380] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:572:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TPartitionTests::UserActCount [GOOD] >> TPartitionTests::TooManyImmediateTxs >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueTest::TestRenameWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2026-01-10T12:18:31.130037Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.212012Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:31.212121Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:31.212196Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:31.212262Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:31.231622Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-10T12:18:31.234420Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-10T12:18:31.000000Z 2026-01-10T12:18:31.234718Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-10T12:18:31.256833Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.298175Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.319210Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.329803Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.371268Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.412773Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:31.433689Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-1" IncludeFrom: true To: "c0000000003Sclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003wclient-1|0000000000000000" IncludeFrom: true To: "c0000000003wclient-1|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-2@\000" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\213\340\276\27238\001" StorageChannel: INLINE } 2026-01-10T12:18:31.966795Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:32.017213Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:32.017272Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:32.017321Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:32.017374Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] 
doesn't have tx writes info 2026-01-10T12:18:32.033706Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-10T12:18:32.033982Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:32.034304Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:180:2193] 2026-01-10T12:18:32.035392Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request 2026-01-10T12:18:32.035592Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-10T12:18:32.035764Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:18:32.035877Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-10T12:18:32.036008Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:18:32.036117Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2026-01-10T12:18:32.036371Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-10T12:18:32.036447Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-10T12:18:32.036571Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-10T12:18:32.036687Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-10T12:18:32.036829Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-10T12:18:32.036875Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:18:32.036927Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-10T12:18:32.000000Z 2026-01-10T12:18:32.036993Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:18:32.037105Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-10T12:18:32.037281Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:18:32.037328Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:32.037382Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:32.037444Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:180:2193] 2026-01-10T12:18:32.037508Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2026-01-10T12:18:32.037575Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:32.037617Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:32.037673Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:32.037716Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:32.037750Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:32.037792Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:32.037829Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:32.037914Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:18:32.038114Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:32.058970Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:32.090118Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:32.090218Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:32.090266Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:32.090332Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:32.090379Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:32.100745Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:32.121633Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:32.121703Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:32.121738Z node 2 :PERSQUEUE DEBUG: partition.cp ... ser action and tx events 2026-01-10T12:18:51.036886Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.057366Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.078561Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.109464Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.129972Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.150469Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.170968Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.191384Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:51.201865Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.222356Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.242812Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-10T12:18:51.263302Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.283772Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.335897Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.356570Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 Wait tx rollback for tx 0 2026-01-10T12:18:51.356723Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:51.356817Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:51.356855Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:51.356886Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-10T12:18:51.356926Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:51.356951Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:51.356978Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:51.357003Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:51.357033Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait immediate tx complete 2 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 2026-01-10T12:18:51.357173Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-10T12:18:51.357206Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:51.357229Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:51.357253Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:51.357285Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:18:51.357355Z node 5 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2026-01-10T12:18:51.357387Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-10T12:18:51.357412Z node 5 
:PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:51.357436Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:51.357462Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2026-01-10T12:18:51.357845Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:51.368235Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.368475Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:51.368608Z node 5 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:51.368705Z node 5 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2026-01-10T12:18:51.368803Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:51.368840Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:51.368884Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-10T12:18:51.368944Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:51.368968Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:51.368999Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-10T12:18:51.369035Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-10T12:18:51.369067Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-10T12:18:51.369104Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:51.369136Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:51.369172Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:51.369332Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got KV request Got KV request 2026-01-10T12:18:51.369821Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:934: 
[72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:51.369874Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request 2026-01-10T12:18:51.370101Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 4 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 partno 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2026-01-10T12:18:51.370258Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 0 count 45 last offset 0, current partition end offset: 50 2026-01-10T12:18:51.370285Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 4. Send blob request. Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 4 2026-01-10T12:18:51.370411Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:51.390892Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:51.401125Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
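Editor's note: the partition trace above keeps repeating one cycle: "Process user action and tx events", a counters line "Events: N, PendingCommits: M, PendingWrites: K", "Process user action and tx pending commits", then "Try persist" (or "Writing. Can't process user action and tx events" while a key-value write is still in flight, cleared by "Received TEvKeyValue::TEvResponse"). The following C++ sketch only illustrates that ordering under assumed, invented names; it is not YDB's partition actor.

#include <cstdio>
#include <deque>

struct TToyPartition {
    std::deque<int> Events;          // stand-in for user actions / tx events
    std::deque<int> PendingCommits;  // stand-in for transactions awaiting commit
    int PendingWrites = 0;
    bool Writing = false;            // a key-value write round-trip is in flight

    void ProcessUserActionsAndTx() {
        if (Writing) {
            std::printf("Writing. Can't process user action and tx events\n");
            return;
        }
        Report();
        while (!Events.empty()) {            // drain new events first,
            Events.pop_front();
            ++PendingWrites;                 // turning them into pending writes
        }
        Report();
        while (!PendingCommits.empty()) {    // then apply pending commits
            PendingCommits.pop_front();
        }
        TryPersist();
    }

    void TryPersist() {
        std::printf("Try persist\n");
        if (PendingWrites > 0) {
            Writing = true;                  // blocks the loop until HandleWriteResponse()
            PendingWrites = 0;
        }
    }

    void HandleWriteResponse() {             // analogue of "Received TEvKeyValue::TEvResponse"
        Writing = false;
        ProcessUserActionsAndTx();
    }

    void Report() const {
        std::printf("Events: %zu, PendingCommits: %zu, PendingWrites: %d\n",
                    Events.size(), PendingCommits.size(), PendingWrites);
    }
};

int main() {
    TToyPartition p;
    p.Events.push_back(1);
    p.PendingCommits.push_back(1);
    p.ProcessUserActionsAndTx();    // prints counters, persists, enters the Writing state
    p.ProcessUserActionsAndTx();    // rejected: "Writing. Can't process user action and tx events"
    p.HandleWriteResponse();        // write acknowledged, loop runs again with empty queues
}
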
2026-01-10T12:18:51.401243Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:51.401281Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:51.401311Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:51.401338Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:51.401366Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:51.401392Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:51.401425Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::PartitionKeyCompaction >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-10T12:12:41.201662Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.201698Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.201730Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:12:41.202229Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:12:41.202281Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.202315Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.203441Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009333s 2026-01-10T12:12:41.205410Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-10T12:12:41.205452Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.205476Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.205535Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009251s 2026-01-10T12:12:41.205990Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-10T12:12:41.206022Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.206045Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:12:41.206104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006328s 2026-01-10T12:12:41.224143Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1768047161224114 2026-01-10T12:12:41.811027Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593704736197580852:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:41.811091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:41.915518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:41.959785Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:41.981889Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593704734995010515:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:12:41.981937Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:12:42.048238Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:12:42.360223Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:42.361677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:12:42.538403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:42.538519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:42.541365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:42.541437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:42.567705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.603630Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:12:42.606139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:42.623901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:42.637531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:12:42.649543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:42.885584Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:42.924112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0028d8/r3tmp/yandexstWmbu.tmp 2026-01-10T12:12:42.924141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0028d8/r3tmp/yandexstWmbu.tmp 2026-01-10T12:12:42.924347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0028d8/r3tmp/yandexstWmbu.tmp 2026-01-10T12:12:42.924451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:43.025338Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:43.067402Z INFO: TTestServer started on Port 9203 GrpcPort 19390 PQClient connected to localhost:19390 2026-01-10T12:12:43.432260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:46.608987Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704756469847305:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.609147Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.609833Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704756469847315:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.608986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704757672418586:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.609169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.610150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704757672418600:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.610191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704757672418599:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.610275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.613224Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593704756469847321:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.613357Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.616796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:12:46.629680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704757672418636:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.629800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.630197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593704757672418639:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.630254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:46.677888Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593704756469847324:2133] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creati ... 3195-3c13950e-bd2f77ae-f1f5206e_0] Write session: acknoledged message 8 2026-01-10T12:18:49.027336Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: acknoledged message 9 2026-01-10T12:18:49.027354Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: acknoledged message 10 2026-01-10T12:18:49.027591Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: close. Timeout = 0 ms 2026-01-10T12:18:49.027651Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session will now close 2026-01-10T12:18:49.027704Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: aborting 2026-01-10T12:18:49.028239Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:18:49.028290Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0] Write session: destroy 2026-01-10T12:18:49.029367Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0 grpc read done: success: 0 data: 2026-01-10T12:18:49.029400Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0 grpc read failed 2026-01-10T12:18:49.029437Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0 grpc closed 2026-01-10T12:18:49.029474Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|57433195-3c13950e-bd2f77ae-f1f5206e_0 is DEAD 2026-01-10T12:18:49.030567Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:18:49.030710Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [17:7593706315470445751:2606] destroyed 2026-01-10T12:18:49.030752Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
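Editor's note: the RetryPolicy::RetryWithBatching output that this block belongs to opens (further up) with a burst of "Got error. Status: INTERNAL_ERROR ... Reconnecting session to cluster cluster in 0.009333s" lines: each failure schedules a reconnect after a small randomized delay. Below is a minimal backoff sketch in that spirit, with invented names and constants; it is not the SDK's actual retry policy.

#include <algorithm>
#include <chrono>
#include <cmath>
#include <cstdio>
#include <random>

// Returns a randomized delay that grows with the attempt number and is capped,
// so repeated INTERNAL_ERRORs back off instead of hammering the cluster.
// The 5 ms base and 10 s cap are illustrative assumptions only.
std::chrono::milliseconds NextReconnectDelay(int attempt) {
    static std::mt19937 rng{std::random_device{}()};
    const double capMs = std::min(10000.0, 5.0 * std::pow(2.0, attempt));
    std::uniform_real_distribution<double> jitter(0.0, capMs);
    return std::chrono::milliseconds(static_cast<long long>(jitter(rng)));
}

int main() {
    for (int attempt = 0; attempt < 5; ++attempt) {
        const auto delay = NextReconnectDelay(attempt);
        std::printf("Reconnecting session to cluster in %.6fs\n",
                    static_cast<double>(delay.count()) / 1000.0);
    }
}
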
2026-01-10T12:18:49.030789Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.030816Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.030837Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.030865Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.030884Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.071552Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.071600Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.071622Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.071648Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.071666Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.171880Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.171925Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.171946Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.171979Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.171999Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.272205Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.272252Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.272271Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.272295Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.272315Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.372592Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.372641Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.372660Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:18:49.372686Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.372706Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.472914Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.472958Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.472980Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.473006Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.473027Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.573275Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.573323Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.573350Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.573382Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.573405Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.673630Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.673682Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.673706Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.673735Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.673757Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.773955Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.773994Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.774010Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.774028Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.774043Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.874336Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.874384Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.874407Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.874434Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.874455Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:49.974645Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:49.974680Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.974696Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:49.974717Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:49.974732Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:18:50.075077Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:50.075125Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:50.075151Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:50.075182Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:50.075217Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |91.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace |91.4%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage |91.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2026-01-10T12:18:53.855473Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:53.857855Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-10T12:18:53.863830Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:53.863915Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-10T12:18:53.869617Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-10T12:18:53.869744Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:312} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2026-01-10T12:18:53.869412Z ErrorReason# |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpWorkload::KV [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: 
[4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:78:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:81:2057] recipient: [5:80:2112] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:83:2057] recipient: [5:80:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:82:2113] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:198:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:79:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:84:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:83:2113] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:199:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> Cdc::DecimalKey [GOOD] >> Cdc::AddColumn >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TKeyValueTest::TestWriteLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 21190, MsgBus: 11302 2026-01-10T12:17:25.100589Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705954053548253:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:25.101862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:17:25.339965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:17:25.347838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:17:25.347992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:17:25.420714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:17:25.432729Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:25.433566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593705954053548215:2081] 1768047445097520 != 1768047445097523 2026-01-10T12:17:25.534880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:17:25.572711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:17:25.572738Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:17:25.572747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:17:25.572839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:17:25.997633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:17:26.007457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:17:26.106816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:17:28.135679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705966938450992:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.135832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.136306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705966938451002:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.136398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.368268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:17:28.826638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705966938452572:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.826724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.826971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705966938452577:2454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.827028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593705966938452578:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.827105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:17:28.830592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:17:28.841976Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593705966938452581:2456], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:17:28.941068Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593705966938452632:3554] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:17:30.100352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593705954053548253:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:17:30.100493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:17:40.296162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:17:40.296186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.086099s took: 0.086167s took: 0.086182s took: 0.086012s took: 0.086554s took: 0.087057s took: 0.087161s took: 0.088541s took: 0.088643s took: 0.129961s took: 0.103189s took: 0.104503s took: 0.106617s took: 0.106911s took: 0.107410s took: 0.107916s took: 0.107496s took: 0.108331s took: 0.108704s took: 0.110048s took: 0.214253s took: 0.214735s took: 0.215336s took: 0.216172s took: 0.216492s took: 0.217631s took: 0.219344s took: 0.220066s took: 0.220307s took: 0.220345s took: 0.026949s took: 0.030476s took: 0.030549s took: 0.032327s took: 0.032806s took: 0.032948s took: 0.032987s took: 0.035713s took: 0.035954s took: 0.036293s took: 0.128257s took: 0.128337s took: 0.130717s took: 0.132322s took: 0.131741s took: 0.131806s took: 0.134865s took: 0.138542s took: 0.138616s took: 0.138410s 2026-01-10T12:18:53.883367Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:18:53.883836Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-10T12:18:53.883857Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2026-01-10T12:18:53.883866Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-10T12:18:53.883881Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2026-01-10T12:18:53.883894Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-10T12:18:53.883907Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-10T12:18:53.883918Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2026-01-10T12:18:53.883928Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2026-01-10T12:18:53.883942Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-10T12:18:53.898712Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2026-01-10T12:18:53.898764Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2026-01-10T12:18:53.898788Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-10T12:18:53.898809Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2026-01-10T12:18:53.898829Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2026-01-10T12:18:53.898847Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-10T12:18:53.898863Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-10T12:18:53.898879Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-10T12:18:53.898898Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-10T12:18:53.898937Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-10T12:18:53.898966Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:18:53.898989Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-10T12:18:53.899011Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2026-01-10T12:18:53.899034Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-10T12:18:53.899052Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-10T12:18:53.899084Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-10T12:18:53.899107Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-10T12:18:53.899139Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-10T12:18:53.899176Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2026-01-10T12:18:53.899198Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 
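Editor's note: at this volume (long runs of HIVE "TabletId ... not found" warnings and PERSQUEUE debug lines per test), the practical first step is to aggregate the log rather than read it linearly. The throwaway helper below, assuming this output has been saved to a file, counts entries per component and severity; it is not part of the test suite and the CLI name is invented.

#include <fstream>
#include <iostream>
#include <map>
#include <regex>
#include <string>

int main(int argc, char** argv) {
    if (argc < 2) {
        std::cerr << "usage: triage <saved ya make output>\n";
        return 1;
    }
    std::ifstream in(argv[1]);
    // Matches the recurring "node <N> :<COMPONENT> <SEVERITY>:" prefix seen above,
    // e.g. "node 1 :HIVE WARN:" or "node 5 :PERSQUEUE DEBUG:".
    const std::regex entry(R"(node \d+ :([A-Z_]+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT):)");
    std::map<std::string, std::size_t> counts;
    std::string line;
    while (std::getline(in, line)) {
        for (auto it = std::sregex_iterator(line.begin(), line.end(), entry);
             it != std::sregex_iterator(); ++it) {
            counts[(*it)[1].str() + ' ' + (*it)[2].str()] += 1;
        }
    }
    for (const auto& [key, count] : counts) {
        std::cout << key << ": " << count << '\n';
    }
}
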
2026-01-10T12:18:53.899212Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-10T12:18:53.899227Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-10T12:18:53.899242Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-10T12:18:53.899257Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-10T12:18:53.899272Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:18:53.899314Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2026-01-10T12:18:53.899332Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2026-01-10T12:18:53.928323Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-10T12:18:53.928469Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-01-10T12:18:53.928506Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:203:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false |91.5%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2026-01-10T12:18:22.863867Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:22.928356Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:22.928431Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:22.928490Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:22.928560Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:22.944475Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-10T12:18:22.945259Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:22.970808Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.002632Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.013359Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.034630Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.035189Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" 
StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.045971Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.067201Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.088817Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.100352Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.122667Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.133501Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.154712Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.168730Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: 
"\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.192617Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.216738Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.238188Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.296718Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.307612Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:23.328563Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 
2026-01-10T12:18:23.356599Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-10T12:18:23.379076Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Ra ... cess user action and tx pending commits 2026-01-10T12:18:54.453280Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:54.453340Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:54.453394Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:54.453548Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:18:54.463866Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.495008Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.526108Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.546912Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.598539Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.670405Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.754097Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:54.785327Z node 4 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 0 2026-01-10T12:18:54.785401Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:54.785446Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:54.785487Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:54.785531Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:54.785573Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:54.806239Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2026-01-10T12:18:54.806370Z node 4 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2026-01-10T12:18:54.806434Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:54.806481Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:54.806542Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:54.806584Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:54.806630Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:54.806756Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2026-01-10T12:18:54.807714Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2026-01-10T12:18:54.807796Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:54.807835Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:18:54.807885Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:54.808451Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 101 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000? 
size 105 WTime 1329 2026-01-10T12:18:54.808852Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:54.829525Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:18:54.829646Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:18:54.829735Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:54.829797Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. Partition: 1 2026-01-10T12:18:54.829880Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2026-01-10T12:18:54.830130Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:54.830183Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:54.830227Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:54.830266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:54.830303Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:18:54.830359Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:18:55.102481Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.149382Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:55.149441Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:55.149485Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:55.149536Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:55.164529Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] bootstrapping {1, {2, 3}, 4} [5:184:2196] 2026-01-10T12:18:55.165442Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [5:184:2196] 2026-01-10T12:18:55.176039Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.207031Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 
local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.227741Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.258825Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.289909Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.310714Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.362124Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.433888Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.517767Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.712478Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.743537Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.970817Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:55.992331Z node 5 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][{1, {2, 3}, 4}][StateIdle] Got error: The transaction is completed Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionScaleManagerGraphCmpTest::Equal [GOOD] >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInSourceTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::GenericCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] Test command err: 2026-01-10T12:18:29.314498Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706230321972308:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:29.319802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:29.364646Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:29.364232Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706230942875877:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:29.364442Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:29.375185Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:29.548201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:29.580140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:29.609136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:29.609227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:29.610845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:29.610992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:29.620113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:29.622422Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:18:29.623167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:29.675186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:29.715787Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:29.762955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003b1a/r3tmp/yandexk0iTJU.tmp 2026-01-10T12:18:29.762982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b1a/r3tmp/yandexk0iTJU.tmp 2026-01-10T12:18:29.763174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b1a/r3tmp/yandexk0iTJU.tmp 
2026-01-10T12:18:29.763301Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:29.802991Z INFO: TTestServer started on Port 18539 GrpcPort 21585 2026-01-10T12:18:29.878699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:21585 2026-01-10T12:18:29.987320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:30.085414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:30.331964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:30.371411Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:32.575388Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706243827778057:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:32.575502Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:32.575503Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706243827778082:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:32.575762Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706243827778086:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:32.575812Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:32.581282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:32.602103Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706243827778087:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-10T12:18:32.706454Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706243827778115:2182] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:32.931716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:32.937057Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706243206875637:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:32.937075Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706243827778129:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:32.937576Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NjgzNTMxODYtZjEyZmFjZWUtNjJmZGFhYmUtNDdiMDcyZWQ=, ActorId: [2:7593706243827778054:2304], ActorState: ExecuteState, LegacyTraceId: 01kekxg2zw4hbc88xvznv6vzyf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:32.937603Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTA3YjU0ZGYtYWUxYjE1ZGUtMzliMTNjZjAtZTNlNTI2ZDY=, ActorId: [1:7593706243206875612:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxg338eaxqr8g9rv6b0qzh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:32.940673Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:18:32.940713Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' be ... ror=timeout; === CheckClustersList. 
Ok 2026-01-10T12:18:53.145989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:53.527307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:18:53.974166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:54.441773Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:18:54.794820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:18:55.195352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1768047535550, 1768047535550, 0, 13); 2026-01-10T12:18:55.646306Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-10T12:18:55.646327Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:18:55.646333Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: 
--!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:18:55.646353Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-10T12:18:55.646486Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7593706344224465840:4063], Recipient [3:7593706335634530395:3545]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7593706344224465839:4063] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-10T12:18:55.646585Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7593706344224465839:4063], Recipient [3:7593706335634530395:3545]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2026-01-10T12:18:55.646666Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [3:7593706335634530395:3545], Recipient [3:7593706344224465839:4063]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-10T12:18:55.646702Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-10T12:18:55.646763Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7593706344224465839:4063], Recipient [3:7593706335634530395:3545]: NActors::TEvents::TEvPoison 2026-01-10T12:18:55.646820Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [3:7593706296979822526:2073], Recipient [3:7593706344224465839:4063]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-10T12:18:55.646844Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2026-01-10T12:18:55.649558Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [3:7593706296979822744:2269], Recipient [3:7593706344224465839:4063]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=3&id=ZTI0NGU3OTUtYjJiODljZjItNzE2ODJkYjEtMTdhN2M1NWM=" NodeId: 3 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-10T12:18:55.649602Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2026-01-10T12:18:55.788058Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [3:7593706296979822744:2269], Recipient [3:7593706344224465839:4063]: 
NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=ZTI0NGU3OTUtYjJiODljZjItNzE2ODJkYjEtMTdhN2M1NWM=" PreparedQuery: "be165bd-14b5b820-bf5aacd7-ff23232" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kekxgsn74a8vgx89qptzrprr" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1768047535550 } items { uint64_value: 1768047535550 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS Con 2026-01-10T12:18:55.788218Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-10T12:18:55.788240Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-10T12:18:55.788337Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7593706344224465892:4063], Recipient [3:7593706335634530395:3545]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7593706344224465839:4063] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-10T12:18:55.788380Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7593706344224465839:4063], Recipient [3:7593706335634530395:3545]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2026-01-10T12:18:55.788423Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [3:7593706335634530395:3545], Recipient [3:7593706344224465839:4063]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-10T12:18:55.788451Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2026-01-10T12:18:55.788623Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7593706344224465839:4063], Recipient [3:7593706335634530395:3545]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2026-01-10T12:18:55.872397Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [3:7593706296979822744:2269], Recipient [3:7593706344224465839:4063]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=ZTI0NGU3OTUtYjJiODljZjItNzE2ODJkYjEtMTdhN2M1NWM=" PreparedQuery: "b70cb6ba-f5f02315-236f3494-e034c852" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 
} } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 47 1 2026-01-10T12:18:55.872443Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-10T12:18:55.872483Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2026-01-10T12:18:55.872506Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7593706344224465839:4063] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> TContinuousBackupWithRebootsTests::Basic [GOOD] >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::Basic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:18:37.316286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:18:37.316448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:37.316511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:18:37.317089Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:18:37.317145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:18:37.317193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:18:37.317289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:37.317373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:18:37.318406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:37.319403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:18:37.483247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:18:37.483394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:37.484380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:37.507456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:18:37.508042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:18:37.510231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:18:37.525528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:18:37.526313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:18:37.529091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:18:37.530916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:18:37.536909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:37.537120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:18:37.542001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:18:37.542084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:37.542308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:18:37.542379Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:18:37.542428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:18:37.543115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:18:37.710560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.712555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.712707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.712773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.712853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.712965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.713748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... _SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:18:59.648292Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 6 2026-01-10T12:18:59.648316Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:18:59.648570Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:18:59.648594Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:18:59.648616Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-10T12:18:59.648641Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:18:59.648688Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-10T12:18:59.649155Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:18:59.649183Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:18:59.649203Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-10T12:18:59.650874Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:18:59.650971Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:18:59.653258Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:18:59.653431Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:18:59.653566Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:18:59.653938Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 2026-01-10T12:18:59.654020Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:18:59.654077Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:18:59.654111Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:18:59.654229Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:18:59.654266Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:18:59.654303Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:18:59.654337Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:18:59.654375Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-10T12:18:59.654417Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:18:59.654460Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:18:59.654497Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:18:59.654635Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:18:59.654675Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-10T12:18:59.654705Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-10T12:18:59.654732Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:18:59.654753Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-10T12:18:59.654771Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-10T12:18:59.654818Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:18:59.655132Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:18:59.655171Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:18:59.655249Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2026-01-10T12:18:59.655307Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:18:59.655377Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:18:59.655421Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:18:59.655450Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:18:59.657477Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:18:59.657759Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:18:59.657798Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:18:59.658194Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:18:59.658265Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:18:59.658301Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [16:1153:3069] TestWaitNotification: OK eventTxId 104 2026-01-10T12:18:59.658776Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:18:59.658991Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 238us result status StatusPathDoesNotExist 2026-01-10T12:18:59.659141Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:18:59.659693Z node 16 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:18:59.659922Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 248us result status StatusPathDoesNotExist 2026-01-10T12:18:59.660114Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] >> PQCountersLabeled::PartitionKeyCompaction [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:18:54.946881Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706340251545636:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:54.946939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:54.972597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:55.216889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:55.216925Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706340251545609:2081] 1768047534945519 != 1768047534945522 2026-01-10T12:18:55.225615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:55.226580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:55.226680Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:55.230532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:55.425996Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706340251545862:2107] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:55.426071Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706344546513662:2447] HANDLE EvNavigateScheme dc-1 2026-01-10T12:18:55.426197Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706340251545903:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:55.426362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706344546513550:2360][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706340251545903:2130], cookie# 1 2026-01-10T12:18:55.428042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706344546513559:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513556:2360], cookie# 1 2026-01-10T12:18:55.428104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706344546513560:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513557:2360], cookie# 1 2026-01-10T12:18:55.428165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706344546513561:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513558:2360], cookie# 1 2026-01-10T12:18:55.428237Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706340251545577:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513559:2360], cookie# 1 2026-01-10T12:18:55.428251Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706340251545580:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513560:2360], cookie# 1 2026-01-10T12:18:55.428289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706340251545583:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706344546513561:2360], cookie# 1 2026-01-10T12:18:55.428320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706344546513559:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706340251545577:2049], cookie# 1 2026-01-10T12:18:55.428376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706344546513560:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706340251545580:2052], cookie# 1 2026-01-10T12:18:55.428413Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706344546513561:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706340251545583:2055], cookie# 1 2026-01-10T12:18:55.428498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7593706344546513550:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706344546513556:2360], cookie# 1 2026-01-10T12:18:55.428532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706344546513550:2360][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:55.428556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706344546513550:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706344546513557:2360], cookie# 1 2026-01-10T12:18:55.428578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706344546513550:2360][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:55.428621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706344546513550:2360][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706344546513558:2360], cookie# 1 2026-01-10T12:18:55.428642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706344546513550:2360][/dc-1] Sync cookie mismatch: sender# [1:7593706344546513558:2360], cookie# 1, current cookie# 0 2026-01-10T12:18:55.428682Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706340251545903:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:55.435005Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706340251545903:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706344546513550:2360] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:55.435201Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706340251545903:2130], cacheItem# { Subscriber: { Subscriber: [1:7593706344546513550:2360] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:18:55.438190Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706344546513663:2448], recipient# [1:7593706344546513662:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:55.438269Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706344546513662:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:18:55.483954Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706344546513662:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:18:55.487604Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706344546513662:2447] Handle TEvDescribeSchemeResult Forward to# [1:7593706344546513661:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:18:55.489123Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593706340251545862:2107] Handle TEvProposeTransaction 2026-01-10T12:18:55.489156Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593706340251545862:2107] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:18:55.489220Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593706340251545862:2107] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593706344546513668:2452] 2026-01-10T12:18:55.520565Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706340251545903:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Op ... 
2026-01-10T12:18:58.611708Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7593706352649957592:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7593706356944925509:2569] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047538124 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:58.611819Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593706352649957592:2128], cacheItem# { Subscriber: { Subscriber: [2:7593706356944925509:2569] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047538124 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-10T12:18:58.611985Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593706356944925670:2663], recipient# [2:7593706356944925669:2662], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:58.612022Z node 2 :TX_PROXY INFO: describe.cpp:354: Actor# [2:7593706356944925669:2662] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-10T12:18:58.612767Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7593706352649957551:2106] Handle TEvNavigate describe path /dc-1 2026-01-10T12:18:58.612803Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7593706356944925672:2665] HANDLE EvNavigateScheme /dc-1 2026-01-10T12:18:58.612871Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593706352649957592:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:58.612932Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7593706352649957951:2367][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7593706352649957592:2128], cookie# 4 2026-01-10T12:18:58.612970Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593706352649957965:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957962:2367], cookie# 
4 2026-01-10T12:18:58.612986Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593706352649957966:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957963:2367], cookie# 4 2026-01-10T12:18:58.612995Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593706352649957967:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957964:2367], cookie# 4 2026-01-10T12:18:58.613008Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706352649957269:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957965:2367], cookie# 4 2026-01-10T12:18:58.613009Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706352649957272:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957966:2367], cookie# 4 2026-01-10T12:18:58.613023Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706352649957275:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706352649957967:2367], cookie# 4 2026-01-10T12:18:58.613050Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706352649957966:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957272:2052], cookie# 4 2026-01-10T12:18:58.613076Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706352649957965:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957269:2049], cookie# 4 2026-01-10T12:18:58.613097Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706352649957967:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957275:2055], cookie# 4 2026-01-10T12:18:58.613129Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706352649957951:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957963:2367], cookie# 4 2026-01-10T12:18:58.613152Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7593706352649957951:2367][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:58.613175Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706352649957951:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957962:2367], cookie# 4 2026-01-10T12:18:58.613195Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7593706352649957951:2367][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:58.613228Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706352649957951:2367][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7593706352649957964:2367], cookie# 4 2026-01-10T12:18:58.613233Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7593706352649957592:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:58.613244Z node 2 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7593706352649957951:2367][/dc-1] Sync cookie mismatch: sender# [2:7593706352649957964:2367], cookie# 4, current cookie# 0 2026-01-10T12:18:58.613270Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7593706352649957592:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7593706352649957951:2367] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047538110 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:58.613321Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593706352649957592:2128], cacheItem# { Subscriber: { Subscriber: [2:7593706352649957951:2367] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047538110 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-10T12:18:58.613422Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593706356944925673:2666], recipient# [2:7593706356944925672:2665], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:58.613443Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7593706356944925672:2665] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:18:58.613483Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7593706356944925672:2665] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:18:58.614038Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7593706356944925672:2665] Handle TEvDescribeSchemeResult Forward to# [2:7593706356944925671:2664] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1768047538110 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } 
DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] 2026-01-10T12:18:53.237359Z node 1 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-10T12:19:00.211855Z node 2 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> 
TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true >> TExtSubDomainTest::GenericCases [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:85:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:205:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:86:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:206:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:87:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:89:2118] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:92:2057] recipient: [12:89:2118] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:91:2119] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:88:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:93:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:92:2120] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2122] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:96:2057] recipient: [15:93:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:95:2123] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-10T12:18:58.475430Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706357626078420:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:58.476024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:58.604234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:58.686393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:58.686529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:58.693925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:58.752339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:58.862864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:18:58.886651Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706357626078647:2126] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:58.886711Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706357626079131:2446] HANDLE EvNavigateScheme dc-1 2026-01-10T12:18:58.886796Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706357626078657:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:58.886932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706357626078886:2281][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706357626078657:2128], cookie# 1 2026-01-10T12:18:58.888980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706357626078890:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7593706357626078887:2281], cookie# 1 2026-01-10T12:18:58.889045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706357626078891:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706357626078888:2281], cookie# 1 2026-01-10T12:18:58.889077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706357626078892:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706357626078889:2281], cookie# 1 2026-01-10T12:18:58.889076Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706357626078323:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706357626078890:2281], cookie# 1 2026-01-10T12:18:58.889126Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706357626078326:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706357626078891:2281], cookie# 1 2026-01-10T12:18:58.889152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706357626078329:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706357626078892:2281], cookie# 1 2026-01-10T12:18:58.889198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706357626078890:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078323:2050], cookie# 1 2026-01-10T12:18:58.889242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706357626078891:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078326:2053], cookie# 1 2026-01-10T12:18:58.889263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706357626078892:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078329:2056], cookie# 1 2026-01-10T12:18:58.889317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706357626078886:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078887:2281], cookie# 1 2026-01-10T12:18:58.889365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706357626078886:2281][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:58.889399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706357626078886:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078888:2281], cookie# 1 2026-01-10T12:18:58.889442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706357626078886:2281][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:58.889491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706357626078886:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706357626078889:2281], cookie# 1 2026-01-10T12:18:58.889527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706357626078886:2281][/dc-1] Sync cookie mismatch: sender# [1:7593706357626078889:2281], cookie# 1, current cookie# 0 2026-01-10T12:18:58.889544Z 
node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706357626078657:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:58.894770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706357626078657:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706357626078886:2281] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:58.894893Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706357626078886:2281] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:18:58.896897Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706357626079132:2447], recipient# [1:7593706357626079131:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:58.896991Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706357626079131:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:18:58.934129Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706357626079131:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:18:58.937648Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706357626079131:2446] Handle TEvDescribeSchemeResult Forward to# [1:7593706357626079130:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:18:58.939171Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593706357626078647:2126] Handle TEvProposeTransaction 2026-01-10T12:18:58.939195Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593706357626078647:2126] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:18:58.939268Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593706357626078647:2126] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593706357626079137:2451] 2026-01-10T12:18:59.072611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593706357626079137:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-10T12:18:59.072678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... 
0510981827:3038] 2026-01-10T12:19:01.223423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593706357626078326:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593706370510981835:3039] 2026-01-10T12:19:01.223438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593706357626078326:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593706370510981836:3038] 2026-01-10T12:19:01.223458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593706370510981825:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7593706370510981828:3038] 2026-01-10T12:19:01.223468Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706357626078657:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:19:01.223483Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593706370510981825:3038][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7593706357626078657:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:01.223503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593706370510981825:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7593706370510981829:3038] 2026-01-10T12:19:01.223524Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706357626078657:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593706370510981826:3039] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:01.223529Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7593706370510981825:3038][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7593706357626078657:2128], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:01.223574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593706357626078323:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593706370510981833:3038] 2026-01-10T12:19:01.223592Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593706357626078329:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593706370510981837:3039] 2026-01-10T12:19:01.223606Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7593706357626078329:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7593706370510981838:3038] 2026-01-10T12:19:01.223606Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706370510981826:3039] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 
0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:01.223632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706357626078657:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-10T12:19:01.223686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706357626078657:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7593706370510981825:3038] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:01.223751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706370510981825:3038] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:01.223837Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706370510981839:3040], recipient# [1:7593706370510981821:2328], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:01.479443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706357626078657:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:01.479595Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706361921046936:2852] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:01.479699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706370510981841:3041], recipient# [1:7593706370510981840:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:02.222829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706357626078657:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:02.222930Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706370510981807:3036] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:02.222991Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706374805949157:3045], recipient# [1:7593706374805949156:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:02.480322Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706357626078657:2128], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:02.480421Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706357626078657:2128], cacheItem# { Subscriber: { Subscriber: [1:7593706361921046936:2852] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:02.480493Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706374805949159:3046], recipient# [1:7593706374805949158:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2026-01-10T12:18:31.473121Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:31.546736Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:31.546805Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:31.546869Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:31.546939Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:31.566445Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:31.586634Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: 
"rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:31.587629Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:31.590286Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:31.594243Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:31.596190Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:31.648367Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:31.648925Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|27f672cd-c46161e8-d101ba9e-eece77ba_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:236:2057] recipient: [1:103:2137] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:239:2057] recipient: [1:238:2234] Leader for TabletID 72057594037927937 is [1:240:2235] sender: [1:241:2057] recipient: [1:238:2234] 2026-01-10T12:18:31.823139Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:31.823194Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:31.824016Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:31.824069Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:31.824651Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:289:2235] 2026-01-10T12:18:31.826702Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:290:2235] 2026-01-10T12:18:31.833388Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:18:31.833916Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:290:2235] 2026-01-10T12:18:31.847167Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:18:31.847829Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:289:2235] 2026-01-10T12:18:31.848560Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:240:2235] sender: [1:318:2057] recipient: [1:14:2061] Got start offset = 40000 2026-01-10T12:18:32.289467Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-10T12:18:32.334307Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:32.334371Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:32.334424Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:32.334483Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] 2026-01-10T12:18:32.351588Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:32.352391Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "user1" Generation: 2 Important: true } 2026-01-10T12:18:32.352900Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:189:2142] 2026-01-10T12:18:32.354900Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:189:2142] 2026-01-10T12:18:32.357211Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:190:2142] 2026-01-10T12:18:32.358902Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 
'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:190:2142] 2026-01-10T12:18:32.398029Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:32.398485Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1bc4a1f4-b4c5b5-6374038d-b93e7097_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:234:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:237:2057] recipient: [2:236:2232] Leader for TabletID 72057594037927937 is [2:238:2233] sender: [2:239:2057] recipient: [2:236:2232] 2026-01-10T12:18:32.538579Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:32.538667Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:32.539643Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:32.539712Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:18:32.540479Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:287:2233] 2026-01-10T12:18:32.542919Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:288:2233] 2026-01-10T12:18:32.552493Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:18:32.553420Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:288:2233] 2026-01-10T12:18:32.570842Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:18:32.571737Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:287:2233] 2026-01-10T12:18:32.572565Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:238:2233] sender: [2:316:2057] recipient: [2:14:2061] Got start offset = 40000 2026-01-10T12:18:32.891625Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] Lea ... 0000000000000000234_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000235_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000236_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000237_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000238_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000239_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000240_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000241_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000242_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000243_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000244_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000245_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000246_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000247_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000248_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000249_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000250_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000251_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000252_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000253_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000254_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000255_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000256_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000257_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000258_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000259_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000260_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000261_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000262_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000263_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000264_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000265_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000266_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000267_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000268_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000269_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000270_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000271_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000272_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000273_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000274_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000275_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000276_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000277_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000278_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000279_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000280_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000281_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000282_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000283_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000284_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000285_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000286_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000287_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000288_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000289_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000290_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000291_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000292_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000293_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000294_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000295_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000296_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000297_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000298_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000299_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000300_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000301_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000302_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000303_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000304_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000305_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000306_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000307_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000308_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000309_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000310_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000311_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000312_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000313_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000314_00000_0000000001_00000? size 28702 2026-01-10T12:19:04.164554Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:19:04.164604Z node 9 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:19:04.164637Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:04.164672Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:04.164706Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:04.164738Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:04.164771Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:19:04.164831Z node 9 :PERSQUEUE DEBUG: partition.cpp:757: [72057594037927937][Partition][0][StateIdle] Init complete for topic 'rt3.dc1--asdfgs--topic' Partition: 0 SourceId: sourceid SeqNo: 315 offset: 314 MaxOffset: 315 2026-01-10T12:19:04.164870Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 13 2026-01-10T12:19:04.164909Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 13 2026-01-10T12:19:04.164973Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:19:04.165547Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:19:04.166782Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 3 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 partno 0 count 1 size 1024000 endOffset 315 max time lag 0ms effective offset 0 2026-01-10T12:19:04.167081Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 8352628 count 168 last offset 0, current partition end offset: 315 2026-01-10T12:19:04.167130Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-10T12:19:04.167345Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:464: No blob in L1. Partition 0 offset 0 partno 0 count 167 parts_count 0 actorID [9:3737:5380] 2026-01-10T12:19:04.167423Z node 9 :PERSQUEUE DEBUG: read.h:142: [72057594037927937][PQCacheProxy]Reading cookie 3. Have to read 1 of 1 from KV 2026-01-10T12:19:04.167562Z node 9 :PERSQUEUE DEBUG: pq_l2_cache.cpp:223: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 0 partno 0 count 167 parts_count 0 2026-01-10T12:19:04.193922Z node 9 :PERSQUEUE DEBUG: read.h:178: [72057594037927937][PQCacheProxy]Got results. 1 of 1 from KV. Status 1 2026-01-10T12:19:04.193994Z node 9 :PERSQUEUE DEBUG: read.h:195: [72057594037927937][PQCacheProxy]Got results. result 0 from KV. Status 0 2026-01-10T12:19:04.194093Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:398: Prefetched blob in L1. 
Partition 0 offset 0 count 167 size 8352628 actorID [9:3737:5380] 2026-01-10T12:19:04.194258Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:19:04.212653Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 167 count 11 size 550146 from pos 0 cbcount 11 2026-01-10T12:19:04.220848Z node 9 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 167 parts 0 suffix '0' size 8352628 2026-01-10T12:19:04.221228Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2026-01-10T12:19:04.222435Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 9 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [9:3696:5380] sender: [9:3914:2057] recipient: [9:14:2061] 2026-01-10T12:19:04.223232Z node 9 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [9:3913:5430], now have 1 active actors on pipe Got start offset = 0 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:18:56.266846Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706346458804679:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:56.268517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:56.440668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:56.461273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:56.461383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:56.465724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:56.534409Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:56.653407Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706346458804928:2142] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:56.653468Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706346458805364:2441] HANDLE EvNavigateScheme dc-1 2026-01-10T12:18:56.653695Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706346458804935:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:56.653829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706346458805176:2298][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706346458804935:2145], cookie# 1 2026-01-10T12:18:56.655712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346458805189:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805186:2298], cookie# 1 2026-01-10T12:18:56.655810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346458805190:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805187:2298], cookie# 1 2026-01-10T12:18:56.655836Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346458804579:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805189:2298], cookie# 1 2026-01-10T12:18:56.655840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346458805191:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805188:2298], cookie# 1 2026-01-10T12:18:56.655866Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346458804582:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805190:2298], cookie# 1 2026-01-10T12:18:56.655874Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346458804585:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346458805191:2298], cookie# 1 2026-01-10T12:18:56.655921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346458805189:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458804579:2050], cookie# 1 2026-01-10T12:18:56.655936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346458805190:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458804582:2053], cookie# 1 2026-01-10T12:18:56.655946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346458805191:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458804585:2056], cookie# 1 2026-01-10T12:18:56.655981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346458805176:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458805186:2298], cookie# 1 2026-01-10T12:18:56.656021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706346458805176:2298][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:56.656037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346458805176:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458805187:2298], cookie# 1 2026-01-10T12:18:56.656088Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706346458804935:2145], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:56.656090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706346458805176:2298][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:56.656117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346458805176:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346458805188:2298], cookie# 1 2026-01-10T12:18:56.656139Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706346458804935:2145], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-10T12:18:56.656142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706346458805176:2298][/dc-1] Sync cookie mismatch: sender# [1:7593706346458805188:2298], cookie# 1, current cookie# 0 2026-01-10T12:18:56.656191Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706346458804935:2145], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-10T12:18:56.656212Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706346458804935:2145], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-10T12:18:56.656232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593706346458805365:2442][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:18:56.656317Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706346458804935:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:56.656633Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346458804579:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706346458805371:2442] 2026-01-10T12:18:56.656659Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346458804579:2050] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:56.656734Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346458804579:2050] Subscribe: subscriber# [1:7593706346458805371:2442], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:56.656772Z 
node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346458804582:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706346458805372:2442] 2026-01-10T12:18:56.656777Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346458804582:2053] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:56.656791Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346458804582:2053] Subscribe: subscriber# [1:7593706346458805372:2442], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:56.656808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346458804585:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706346458805373:2442] 2026-01-10T12:18:56.656811Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346458804585:2056] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:56.656829Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346458804585:2056] Subscribe: subscriber# [1:7593706346458805373:2442], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:56.656883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593706346458805371:2442][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7593706346458804579:2050] 2026-01-10T12:18:56.656927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593706346458805372:2442][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7593706346458804582:2053] 2026-01-10T12:18:56.656964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593706346458805373:2442][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7593706346458804585:2056] 2026-01-10T12:18:56.657018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593706346458805365:2442][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7593706346458805368:2442] 2026-01-10T12:18:56.657066Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7593706346458805365:2442][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7593706346458805369:2442] 2026-01-10T12:18:56.657111Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7593706346458805365:2442][/dc-1/.metadata/script_executions] Set up state: owner# [1:7593706346458804935:2145], state# { Deleted: 1 Strong: 1 Version: DomainId: Strong: 1 } 2026-01-10T12:19:03.235198Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593706363614273906:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593706376499176769:2804] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 
CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.235281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706363614273906:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706376499176769:2804] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:03.235348Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706376499176786:2806], recipient# [3:7593706376499176756:2302], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.235413Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706363614273587:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706376499176787:2805] 2026-01-10T12:19:03.235449Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706363614273587:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:03.235494Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706363614273587:2050] Subscribe: subscriber# [3:7593706376499176787:2805], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:03.235502Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706363614273590:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706376499176788:2805] 2026-01-10T12:19:03.235513Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706363614273590:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:03.235541Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706363614273593:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706376499176789:2805] 2026-01-10T12:19:03.235548Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706363614273590:2053] Subscribe: subscriber# [3:7593706376499176788:2805], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:03.235570Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706363614273593:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:03.235602Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706363614273593:2056] Subscribe: subscriber# [3:7593706376499176789:2805], path# 
/dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:03.235624Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706376499176787:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706363614273587:2050] 2026-01-10T12:19:03.235693Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706376499176788:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706363614273590:2053] 2026-01-10T12:19:03.235707Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593706363614273587:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706376499176787:2805] 2026-01-10T12:19:03.235718Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706376499176789:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706363614273593:2056] 2026-01-10T12:19:03.235725Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593706363614273590:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706376499176788:2805] 2026-01-10T12:19:03.235743Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593706363614273593:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706376499176789:2805] 2026-01-10T12:19:03.235750Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706376499176770:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706376499176783:2805] 2026-01-10T12:19:03.235780Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706376499176770:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706376499176784:2805] 2026-01-10T12:19:03.235806Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7593706376499176770:2805][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7593706363614273906:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:03.235866Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706376499176770:2805][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706376499176785:2805] 2026-01-10T12:19:03.235874Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593706363614273906:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:19:03.235898Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593706376499176770:2805][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7593706363614273906:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, 
other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:03.235929Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593706363614273906:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593706376499176770:2805] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.235993Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706363614273906:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706376499176770:2805] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:03.236102Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706376499176790:2807], recipient# [3:7593706376499176765:2311], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.274700Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706363614273906:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.274873Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706363614273906:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706367909242110:2791] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:03.274973Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706376499176792:2808], 
recipient# [3:7593706376499176791:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-10T12:19:02.958181Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706370822041273:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:02.958221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:03.144158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:03.144334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:03.147253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:03.210441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:03.217257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:03.218175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706370822041246:2081] 1768047542956818 != 1768047542956821 2026-01-10T12:19:03.379829Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706370822041509:2105] Handle TEvNavigate describe path dc-1 2026-01-10T12:19:03.379896Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706375117009286:2459] HANDLE EvNavigateScheme dc-1 2026-01-10T12:19:03.380147Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706370822041519:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.380275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706370822041519:2107], cookie# 1 2026-01-10T12:19:03.382158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009016:2201][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009013:2201], cookie# 1 2026-01-10T12:19:03.382223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009017:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009014:2201], cookie# 1 2026-01-10T12:19:03.382242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009018:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009015:2201], cookie# 1 2026-01-10T12:19:03.382254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041214:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009016:2201], cookie# 1 2026-01-10T12:19:03.382266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041217:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009017:2201], cookie# 1 2026-01-10T12:19:03.382312Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041220:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009018:2201], cookie# 1 2026-01-10T12:19:03.382325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009016:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041214:2049], cookie# 1 2026-01-10T12:19:03.382357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009017:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041217:2052], cookie# 1 2026-01-10T12:19:03.382386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009018:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041220:2055], cookie# 1 2026-01-10T12:19:03.382461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009013:2201], cookie# 1 2026-01-10T12:19:03.382509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706375117008976:2201][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:03.382568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009014:2201], cookie# 1 2026-01-10T12:19:03.382593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706375117008976:2201][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:03.382625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009015:2201], cookie# 1 2026-01-10T12:19:03.382641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706375117008976:2201][/dc-1] Sync cookie mismatch: sender# 
[1:7593706375117009015:2201], cookie# 1, current cookie# 0 2026-01-10T12:19:03.382678Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706370822041519:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:03.382831Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706370822041519:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706375117008976:2201] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.382932Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706370822041519:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706375117008976:2201] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:19:03.385696Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706375117009287:2460], recipient# [1:7593706375117009286:2459], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.385770Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706375117009286:2459] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:03.421363Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706370822041519:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.421455Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706370822041519:2107], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.421528Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706370822041519:2107], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.421569Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706370822041519:2107], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.421754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593706375117009288:2461][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:19:03.422250Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706370822041214:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706375117009294:2461] 2026-01-10T12:19:03.422298Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706370822041214:2049] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:19:03.422352Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706370822041214:2049] Subscribe: subscriber# [1:7593706375117009294:2461], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:03.422389Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706370822041217:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706375117009295:2461] 2026-01-10T12:19:03.422400Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706370822041217:2052] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:19:03.422416Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706370822041217:2052] Subscribe: subscriber# [1:7593706375117009295 ... 
mr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-10T12:19:03.556671Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706370822041519:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706375117009349:2498] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047543598 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.556752Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706370822041519:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706375117009349:2498] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047543598 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:19:03.556912Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706375117009356:2499], recipient# [1:7593706375117009348:2497], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:38:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.556951Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7593706375117009348:2497] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-10T12:19:03.574799Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706370822041509:2105] Handle TEvNavigate describe path /dc-1 2026-01-10T12:19:03.574842Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706375117009358:2501] HANDLE EvNavigateScheme /dc-1 2026-01-10T12:19:03.574925Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706370822041519:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.574979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706370822041519:2107], cookie# 4 2026-01-10T12:19:03.575042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009016:2201][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009013:2201], cookie# 4 2026-01-10T12:19:03.575060Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009017:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009014:2201], cookie# 4 2026-01-10T12:19:03.575070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706375117009018:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009015:2201], cookie# 4 2026-01-10T12:19:03.575089Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041217:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009017:2201], cookie# 4 2026-01-10T12:19:03.575114Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041220:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009018:2201], cookie# 4 2026-01-10T12:19:03.575138Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370822041214:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706375117009016:2201], cookie# 4 2026-01-10T12:19:03.575149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009017:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041217:2052], cookie# 4 2026-01-10T12:19:03.575178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009018:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041220:2055], cookie# 4 2026-01-10T12:19:03.575192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706375117009016:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706370822041214:2049], cookie# 4 2026-01-10T12:19:03.575223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009014:2201], cookie# 4 2026-01-10T12:19:03.575236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706375117008976:2201][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:03.575261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009015:2201], cookie# 4 2026-01-10T12:19:03.575274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706375117008976:2201][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:03.575294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706375117008976:2201][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7593706375117009013:2201], cookie# 4 2026-01-10T12:19:03.575301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706375117008976:2201][/dc-1] Sync cookie mismatch: sender# 
[1:7593706375117009013:2201], cookie# 4, current cookie# 0 2026-01-10T12:19:03.575326Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706370822041519:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:03.575389Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706370822041519:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706375117008976:2201] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047543577 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.575443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706370822041519:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706375117008976:2201] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047543577 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-10T12:19:03.575552Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706375117009359:2502], recipient# [1:7593706375117009358:2501], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.575581Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706375117009358:2501] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:03.575622Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706375117009358:2501] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:19:03.576064Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706375117009358:2501] Handle TEvDescribeSchemeResult Forward to# [1:7593706375117009357:2500] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 128 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1768047543577 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] 2026-01-10T12:19:05.536816Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:256: {KV323@keyvalue_storage_read_request.cpp:256} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2026-01-10T12:19:05.541594Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1006: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2026-01-10T12:19:05.541669Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1925: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] Test command err: 2026-01-10T12:19:03.408102Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706378054487280:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:03.408471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:03.596511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:03.612147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:03.612266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:03.629189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:03.688284Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:03.689128Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706378054487175:2081] 1768047543404255 != 1768047543404258 2026-01-10T12:19:03.815860Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706378054487437:2105] Handle TEvNavigate describe path dc-1 2026-01-10T12:19:03.815915Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706378054487720:2263] HANDLE EvNavigateScheme dc-1 2026-01-10T12:19:03.816067Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706378054487445:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.816212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706378054487445:2107], cookie# 1 2026-01-10T12:19:03.817700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7593706378054487623:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487620:2189], cookie# 1 2026-01-10T12:19:03.817754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706378054487624:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487621:2189], cookie# 1 2026-01-10T12:19:03.817784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706378054487625:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487622:2189], cookie# 1 2026-01-10T12:19:03.817808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487146:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487624:2189], cookie# 1 2026-01-10T12:19:03.817812Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487143:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487623:2189], cookie# 1 2026-01-10T12:19:03.817838Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487149:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487625:2189], cookie# 1 2026-01-10T12:19:03.817901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487624:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487146:2052], cookie# 1 2026-01-10T12:19:03.817946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487623:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487143:2049], cookie# 1 2026-01-10T12:19:03.817999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487625:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487149:2055], cookie# 1 2026-01-10T12:19:03.818039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487621:2189], cookie# 1 2026-01-10T12:19:03.818073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706378054487598:2189][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:03.818092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487620:2189], cookie# 1 2026-01-10T12:19:03.818126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706378054487598:2189][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:03.818193Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487622:2189], cookie# 1 2026-01-10T12:19:03.818208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706378054487598:2189][/dc-1] 
Sync cookie mismatch: sender# [1:7593706378054487622:2189], cookie# 1, current cookie# 0 2026-01-10T12:19:03.818256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706378054487445:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:03.824463Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706378054487445:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706378054487598:2189] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.824603Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706378054487445:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706378054487598:2189] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:19:03.827445Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706378054487721:2264], recipient# [1:7593706378054487720:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.827535Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706378054487720:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:03.838211Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706378054487445:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: 
true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.838389Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706378054487445:2107], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.838470Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706378054487445:2107], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.838505Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706378054487445:2107], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-10T12:19:03.838683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593706378054487722:2265][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:19:03.839169Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706378054487143:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706378054487728:2265] 2026-01-10T12:19:03.839188Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706378054487143:2049] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:19:03.839254Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706378054487143:2049] Subscribe: subscriber# [1:7593706378054487728:2265], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:03.839303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706378054487146:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706378054487729:2265] 2026-01-10T12:19:03.839315Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706378054487146:2052] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:19:03.839352Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706378054487146:2052] Subscribe: subscriber# [1:7593706378054487729 ... 
fy# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-10T12:19:03.993023Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706378054487445:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706378054487791:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047544032 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.993076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706378054487445:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706378054487791:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1768047544032 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:19:03.993217Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706378054487798:2309], recipient# [1:7593706378054487790:2307], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.993251Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7593706378054487790:2307] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-10T12:19:04.006763Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706378054487437:2105] Handle TEvNavigate describe path /dc-1 2026-01-10T12:19:04.006813Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706382349455096:2311] HANDLE EvNavigateScheme /dc-1 2026-01-10T12:19:04.006922Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706378054487445:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:04.007009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706378054487445:2107], cookie# 4 2026-01-10T12:19:04.007091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706378054487623:2189][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487620:2189], cookie# 4 2026-01-10T12:19:04.007116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706378054487624:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487621:2189], cookie# 4 2026-01-10T12:19:04.007138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706378054487625:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487622:2189], cookie# 4 2026-01-10T12:19:04.007151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487146:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487624:2189], cookie# 4 2026-01-10T12:19:04.007156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487143:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487623:2189], cookie# 4 2026-01-10T12:19:04.007188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706378054487149:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706378054487625:2189], cookie# 4 2026-01-10T12:19:04.007204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487623:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487143:2049], cookie# 4 2026-01-10T12:19:04.007226Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487624:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487146:2052], cookie# 4 2026-01-10T12:19:04.007253Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706378054487625:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487149:2055], cookie# 4 2026-01-10T12:19:04.007286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487620:2189], cookie# 4 2026-01-10T12:19:04.007303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706378054487598:2189][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:04.007330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487621:2189], cookie# 4 2026-01-10T12:19:04.007353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706378054487598:2189][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:04.007395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706378054487598:2189][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7593706378054487622:2189], cookie# 4 2026-01-10T12:19:04.007410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706378054487598:2189][/dc-1] Sync cookie mismatch: sender# 
[1:7593706378054487622:2189], cookie# 4, current cookie# 0 2026-01-10T12:19:04.007427Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706378054487445:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:04.007488Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706378054487445:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706378054487598:2189] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047544018 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:04.007560Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706378054487445:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706378054487598:2189] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047544018 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-10T12:19:04.007702Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706382349455097:2312], recipient# [1:7593706382349455096:2311], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:04.007743Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706382349455096:2311] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:04.007801Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706382349455096:2311] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:19:04.008371Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706382349455096:2311] Handle TEvDescribeSchemeResult Forward to# [1:7593706382349455095:2310] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1768047544018 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-10T12:19:01.054134Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706370506046299:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:01.055388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:01.211134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:01.220238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:01.220358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:01.226261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:01.312846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706370506046268:2081] 1768047541052044 != 1768047541052047 2026-01-10T12:19:01.316418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:01.401138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:01.431646Z node 1 
:TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706370506046530:2105] Handle TEvNavigate describe path dc-1 2026-01-10T12:19:01.431732Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706370506046834:2267] HANDLE EvNavigateScheme dc-1 2026-01-10T12:19:01.431893Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706370506046536:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:01.432181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706370506046693:2192][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706370506046536:2107], cookie# 1 2026-01-10T12:19:01.434013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706370506046721:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046718:2192], cookie# 1 2026-01-10T12:19:01.434061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706370506046722:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046719:2192], cookie# 1 2026-01-10T12:19:01.434085Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706370506046723:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046720:2192], cookie# 1 2026-01-10T12:19:01.434109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370506046236:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046721:2192], cookie# 1 2026-01-10T12:19:01.434114Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370506046239:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046722:2192], cookie# 1 2026-01-10T12:19:01.434144Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706370506046242:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706370506046723:2192], cookie# 1 2026-01-10T12:19:01.434213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706370506046722:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046239:2052], cookie# 1 2026-01-10T12:19:01.434250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706370506046721:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046236:2049], cookie# 1 2026-01-10T12:19:01.434274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706370506046723:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046242:2055], cookie# 1 2026-01-10T12:19:01.434322Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706370506046693:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046719:2192], cookie# 1 2026-01-10T12:19:01.434379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: 
[main][1:7593706370506046693:2192][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:01.434425Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706370506046693:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046718:2192], cookie# 1 2026-01-10T12:19:01.434453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706370506046693:2192][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:01.434497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706370506046693:2192][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706370506046720:2192], cookie# 1 2026-01-10T12:19:01.434514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706370506046693:2192][/dc-1] Sync cookie mismatch: sender# [1:7593706370506046720:2192], cookie# 1, current cookie# 0 2026-01-10T12:19:01.434592Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706370506046536:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:01.446128Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706370506046536:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706370506046693:2192] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:01.446307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706370506046536:2107], cacheItem# { Subscriber: { Subscriber: [1:7593706370506046693:2192] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:19:01.449627Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706370506046835:2268], recipient# [1:7593706370506046834:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:01.449719Z node 1 
:TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706370506046834:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:01.471437Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706370506046834:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:19:01.474023Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706370506046834:2267] Handle TEvDescribeSchemeResult Forward to# [1:7593706370506046833:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:19:01.475630Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593706370506046530:2105] Handle TEvProposeTransaction 2026-01-10T12:19:01.475649Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593706370506046530:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:19:01.475709Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593706370506046530:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593706370506046840:2272] 2026-01-10T12:19:01.626613Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593706370506046840:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" ... 
move, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2026-01-10T12:19:03.715778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [2:7593706376957408589:2285] 2026-01-10T12:19:03.715985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:03.716010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-10T12:19:03.716056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:03.716173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2026-01-10T12:19:03.716211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2026-01-10T12:19:03.716277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:03.716293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-10T12:19:03.716326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:19:03.716425Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:03.716554Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:03.716990Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7593706376957407976:2104] Handle TEvNavigate describe path /dc-1 2026-01-10T12:19:03.717029Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7593706376957408606:2519] HANDLE EvNavigateScheme /dc-1 2026-01-10T12:19:03.717114Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7593706376957408028:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:03.717221Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7593706376957408128:2149][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7593706376957408028:2119], cookie# 4 2026-01-10T12:19:03.717283Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][2:7593706376957408136:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408133:2149], cookie# 4 2026-01-10T12:19:03.717310Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593706376957408137:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408134:2149], cookie# 4 2026-01-10T12:19:03.717324Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7593706376957408138:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408135:2149], cookie# 4 2026-01-10T12:19:03.717347Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706376957407744:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408137:2149], cookie# 4 2026-01-10T12:19:03.717354Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706376957407741:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408136:2149], cookie# 4 2026-01-10T12:19:03.717368Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7593706376957407747:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7593706376957408138:2149], cookie# 4 2026-01-10T12:19:03.717403Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706376957408137:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957407744:2052], cookie# 4 2026-01-10T12:19:03.717440Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706376957408136:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957407741:2049], cookie# 4 2026-01-10T12:19:03.717478Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7593706376957408138:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957407747:2055], cookie# 4 2026-01-10T12:19:03.717524Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706376957408128:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957408134:2149], cookie# 4 2026-01-10T12:19:03.717557Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7593706376957408128:2149][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:19:03.717586Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706376957408128:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957408133:2149], cookie# 4 2026-01-10T12:19:03.717606Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7593706376957408128:2149][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:19:03.717668Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7593706376957408128:2149][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7593706376957408135:2149], cookie# 4 2026-01-10T12:19:03.717695Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: 
[main][2:7593706376957408128:2149][/dc-1] Sync cookie mismatch: sender# [2:7593706376957408135:2149], cookie# 4, current cookie# 0 2026-01-10T12:19:03.717713Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7593706376957408028:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:19:03.717774Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7593706376957408028:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7593706376957408128:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047543724 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:03.717871Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7593706376957408028:2119], cacheItem# { Subscriber: { Subscriber: [2:7593706376957408128:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1768047543724 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-10T12:19:03.718027Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7593706376957408607:2520], recipient# [2:7593706376957408606:2519], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:19:03.718082Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7593706376957408606:2519] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:19:03.718146Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7593706376957408606:2519] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:19:03.718690Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7593706376957408606:2519] Handle TEvDescribeSchemeResult Forward to# [2:7593706376957408605:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1768047543724 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [1:125:2150] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:134:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-01-10T12:17:19.675825Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:19.675901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.675961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:19.676004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:19.676048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:19.676105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:19.676181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.676260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:19.677096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:19.677344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:19.799333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:17:19.799398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:19.800291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:188:2058] recipient: [1:15:2062] 2026-01-10T12:17:19.811359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:19.811910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:19.812119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:19.818044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:19.818272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:19.818979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.819650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-10T12:17:19.822126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.822309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:19.823457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:19.823520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.823750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:19.823807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:19.823854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:19.823941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.984826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.985765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.985876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.985967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.986777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... ESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2026-01-10T12:19:06.005220Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2026-01-10T12:19:06.005256Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:19:06.005293Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:19:06.005375Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1002 2026-01-10T12:19:06.007957Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1104 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-10T12:19:06.008008Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-10T12:19:06.008176Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1104 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-10T12:19:06.008295Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1104 } } CommitVersion { Step: 5000039 TxId: 1002 } debug: NTableState::TProposedWaitParts operationId# 1002:0 2026-01-10T12:19:06.009414Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-10T12:19:06.009461Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation 
FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-10T12:19:06.009564Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-10T12:19:06.009616Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:19:06.009692Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-10T12:19:06.009751Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:06.009787Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-10T12:19:06.009826Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:19:06.009865Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1002:0 129 -> 240 2026-01-10T12:19:06.011987Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-10T12:19:06.012229Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-10T12:19:06.013869Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-10T12:19:06.013999Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-10T12:19:06.014296Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-10T12:19:06.014344Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2026-01-10T12:19:06.014438Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2026-01-10T12:19:06.014471Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-10T12:19:06.014507Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2026-01-10T12:19:06.014539Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 
2026-01-10T12:19:06.014575Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2026-01-10T12:19:06.014615Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-10T12:19:06.014653Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1002:0 2026-01-10T12:19:06.014687Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1002:0 2026-01-10T12:19:06.014799Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2026-01-10T12:19:06.017429Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2026-01-10T12:19:06.017478Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2026-01-10T12:19:06.017817Z node 72 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2026-01-10T12:19:06.017909Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2026-01-10T12:19:06.017943Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:811:2782] TestWaitNotification: OK eventTxId 1002 2026-01-10T12:19:06.018355Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:19:06.018564Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 243us result status StatusSuccess 2026-01-10T12:19:06.019061Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:18:58.200939Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706356943351701:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:58.201026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:58.364150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:58.383516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:58.383588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:58.389548Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:58.448198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:58.567097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:18:58.625420Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706356943351938:2130] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:58.625487Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706356943352408:2441] HANDLE EvNavigateScheme dc-1 2026-01-10T12:18:58.625638Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706356943351965:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:58.625778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706356943352186:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706356943351965:2145], cookie# 1 2026-01-10T12:18:58.627177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706356943352236:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352233:2291], cookie# 1 2026-01-10T12:18:58.627211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706356943352237:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352234:2291], cookie# 1 2026-01-10T12:18:58.627222Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706356943352238:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352235:2291], cookie# 1 2026-01-10T12:18:58.627286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706356943351605:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352236:2291], cookie# 1 2026-01-10T12:18:58.627308Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706356943351608:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352237:2291], cookie# 1 2026-01-10T12:18:58.627339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706356943351611:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706356943352238:2291], cookie# 1 2026-01-10T12:18:58.627376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706356943352236:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943351605:2050], cookie# 1 2026-01-10T12:18:58.627410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706356943352237:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943351608:2053], cookie# 1 2026-01-10T12:18:58.627438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706356943352238:2291][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943351611:2056], cookie# 1 2026-01-10T12:18:58.627521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706356943352186:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943352233:2291], cookie# 1 2026-01-10T12:18:58.627581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706356943352186:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:58.627613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706356943352186:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943352234:2291], cookie# 1 2026-01-10T12:18:58.627663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706356943352186:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:58.627739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706356943352186:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706356943352235:2291], cookie# 1 2026-01-10T12:18:58.627751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706356943351965:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:58.627768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706356943352186:2291][/dc-1] Sync cookie mismatch: sender# [1:7593706356943352235:2291], cookie# 1, current cookie# 0 2026-01-10T12:18:58.631973Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706356943351965:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706356943352186:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:58.632095Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706356943351965:2145], cacheItem# { Subscriber: { Subscriber: [1:7593706356943352186:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:18:58.639514Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706356943352409:2442], recipient# [1:7593706356943352408:2441], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: 
ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:58.639637Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706356943352408:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:18:58.684560Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593706356943352408:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:18:58.687416Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706356943352408:2441] Handle TEvDescribeSchemeResult Forward to# [1:7593706356943352407:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:18:58.688898Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593706356943351938:2130] Handle TEvProposeTransaction 2026-01-10T12:18:58.688928Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593706356943351938:2130] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:18:58.689018Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593706356943351938:2130] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593706356943352414:2446] 2026-01-10T12:18:58.755518Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593706356943352414:2446] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { 
Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-10T12:18:58.755592Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... eBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706387555147059:2931] 2026-01-10T12:19:05.824669Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593706374670244018:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593706387555147046:2932] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:05.824671Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:19:05.824739Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706374670244018:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706387555147046:2932] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:05.824763Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593706374670244018:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-10T12:19:05.824791Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593706374670244018:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593706387555147044:2931] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:05.824825Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706374670244018:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706387555147044:2931] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 
2026-01-10T12:19:05.824882Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706387555147066:2934], recipient# [3:7593706387555147042:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:05.824913Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706374670243701:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706387555147063:2933] 2026-01-10T12:19:05.824923Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706374670243701:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:05.824944Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706374670243701:2050] Subscribe: subscriber# [3:7593706387555147063:2933], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:05.824965Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706374670243704:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706387555147064:2933] 2026-01-10T12:19:05.824972Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706374670243704:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:05.824985Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706374670243704:2053] Subscribe: subscriber# [3:7593706387555147064:2933], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:05.825012Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7593706374670243707:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7593706387555147065:2933] 2026-01-10T12:19:05.825018Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7593706374670243707:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:19:05.825018Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706387555147063:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706374670243701:2050] 2026-01-10T12:19:05.825039Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7593706374670243707:2056] Subscribe: subscriber# [3:7593706387555147065:2933], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:19:05.825051Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706387555147064:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706374670243704:2053] 2026-01-10T12:19:05.825071Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: 
[3:7593706374670243701:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706387555147063:2933] 2026-01-10T12:19:05.825076Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7593706387555147065:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706374670243707:2056] 2026-01-10T12:19:05.825086Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593706374670243704:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706387555147064:2933] 2026-01-10T12:19:05.825096Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7593706374670243707:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7593706387555147065:2933] 2026-01-10T12:19:05.825103Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706387555147060:2933] 2026-01-10T12:19:05.825146Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706387555147061:2933] 2026-01-10T12:19:05.825171Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7593706374670244018:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:05.825213Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7593706374670244018:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:19:05.825245Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7593706387555147062:2933] 2026-01-10T12:19:05.825268Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7593706374670244018:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7593706387555147047:2933] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:19:05.825271Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7593706387555147047:2933][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7593706374670244018:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:19:05.825311Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706374670244018:2120], cacheItem# { Subscriber: { Subscriber: 
[3:7593706387555147047:2933] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:05.825370Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706387555147067:2935], recipient# [3:7593706387555147040:2313], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> DstCreator::WithSyncIndex |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> DstCreator::WithIntermediateDir >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> TKeyValueTest::TestRenameWorks [GOOD] >> Cdc::AddIndex >> TKeyValueTest::TestRenameToLongKey >> DstCreator::GlobalConsistency >> DstCreator::WithSyncIndexAndIntermediateDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] 
recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:94:2057] recipient: [13:93:2122] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:96:2057] recipient: [13:93:2122] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:95:2123] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:211:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:92:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:95:2057] recipient: [15:94:2122] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:97:2057] recipient: [15:94:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:96:2123] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:212:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> DstCreator::NonExistentSrc >> DstCreator::ExistingDst >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2026-01-10T12:15:33.193416Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705475190013796:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:33.208881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:33.799176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:33.799305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:33.846114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:33.912637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:33.989344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:34.267009Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:34.267636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:34.267645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:34.267654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:15:34.267738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:34.303690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:34.374764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:34.386038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2026-01-10T12:15:34.389242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:34.425247Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7593705479484981886:2297] 2026-01-10T12:15:34.425492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:34.452237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:34.452337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:34.454734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:34.454806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:34.454856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:34.455238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:34.455290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:34.455315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7593705479484981901:2297] in generation 1 2026-01-10T12:15:34.456809Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:34.506261Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:34.506436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:34.506493Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7593705479484981903:2298] 2026-01-10T12:15:34.506525Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.506542Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:34.506552Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.506726Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:34.506821Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:34.506842Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.506856Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.506870Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:34.506882Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.509429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705479484981884:2503], serverId# [1:7593705479484981887:2504], sessionId# [0:0:0] 2026-01-10T12:15:34.509577Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:34.509906Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:34.510025Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-10T12:15:34.511819Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.512113Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:34.512184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:34.517627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705479484981917:2520], serverId# [1:7593705479484981919:2522], sessionId# [0:0:0] 2026-01-10T12:15:34.522641Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1768047334557 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047334557 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:34.522839Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.522987Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.523112Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.523127Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:34.523149Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768047334557:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:34.523469Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1768047334557:281474976710657 keys extracted: 0 2026-01-10T12:15:34.523610Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:34.523834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.523901Z node 1 :TX_DATASHARD 
INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:34.532690Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:34.533285Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.536066Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1768047334557} 2026-01-10T12:15:34.536129Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.536171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1768047334556 2026-01-10T12:15:34.536183Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.536204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1768047334564 2026-01-10T12:15:34.537593Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.537614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.537629Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:34.537746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047334557 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7593705475190014102:2151], exec latency: 9 ms, propose latency: 14 ms 2026-01-10T12:15:34.537781Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:15:34.537834Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.539826Z node 1 :CH ... 
artition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:08.699871Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:08.699905Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:08.721052Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:08.721117Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:08.721152Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:08.721192Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:08.721224Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:08.721915Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2026-01-10T12:19:08.722015Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:08.722156Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:42: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2026-01-10T12:19:08.722395Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-10T12:19:08.733508Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:78: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2026-01-10T12:19:08.733588Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 } 2026-01-10T12:19:08.733660Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:08.733712Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3842: Waiting for PlanStep# 12000 from mediator time cast 2026-01-10T12:19:08.733822Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][30:903:2775] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-10T12:19:08.733979Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][30:1160:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-10T12:19:08.734180Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 
2026-01-10T12:19:08.734399Z node 30 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [30:1160:2950], at tablet# 72075186224037888 2026-01-10T12:19:08.734452Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2026-01-10T12:19:08.734571Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][30:1160:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-10T12:19:08.734716Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1243:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-10T12:19:08.734949Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:08.734997Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:19:08.735099Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2026-01-10T12:19:08.735242Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:08.735270Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:19:08.735327Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 5 size 26 offset: -1 2026-01-10T12:19:08.735384Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:19:08.735440Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:08.735471Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:19:08.735550Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:19:08.735584Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:08.735620Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:19:08.735723Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-10T12:19:08.735774Z node 30 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:19:08.735802Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:19:08.735839Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:08.735881Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-10T12:19:08.735966Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-10T12:19:08.745374Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 107 count 1 nextOffset 5 batches 1 2026-01-10T12:19:08.745910Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 4 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000? size 93 WTime 8950 2026-01-10T12:19:08.746228Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:19:08.746357Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 93 2026-01-10T12:19:08.747415Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 1 size 93 actorID [30:1117:2903] 2026-01-10T12:19:08.747504Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:19:08.747631Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' size 93 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-10T12:19:08.748495Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:08.748551Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:19:08.748717Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2026-01-10T12:19:08.749650Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2026-01-10T12:19:08.749755Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-10T12:19:08.749914Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. 
Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:07.000000Z 2026-01-10T12:19:08.749996Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-10T12:19:08.750149Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2026-01-10T12:19:08.750346Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-10T12:19:08.750428Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:08.750549Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-10T12:19:08.750869Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2026-01-10T12:19:08.751030Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-10T12:19:08.751773Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> TPQTest::TestReadSubscription [GOOD] >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> DstCreator::ReplicationModeMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: 2026-01-10T12:18:24.493357Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:24.601601Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.601667Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.601742Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.601823Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is 
[1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:24.624989Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.649543Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2026-01-10T12:18:24.650946Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:24.654160Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:24.659731Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:24.661741Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:24.665639Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [1:191:2142] 2026-01-10T12:18:24.667785Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:191:2142] 2026-01-10T12:18:24.682307Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:211:2209], now have 1 active actors on pipe 2026-01-10T12:18:24.682457Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:24.682512Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:24.682809Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-10T12:18:24.682866Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-10T12:18:24.682966Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:18:24.683338Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 
0 error: new GetOwnership request needed for owner 2026-01-10T12:18:24.683395Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:24.683461Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:24.683513Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:24.683559Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:24.683645Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:24.683740Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2026-01-10T12:18:24.683778Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:24.684125Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:213:2211], now have 1 active actors on pipe 2026-01-10T12:18:24.684233Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:24.684278Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:24.684377Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1d264de4-36b683f4-126fa8e8-79231b96_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:24.684448Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:24.684498Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:24.684547Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:18:24.684621Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:24.684671Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:24.684731Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:18:24.684772Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:18:24.684813Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:24.684879Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-10T12:18:24.684984Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:18:24.685313Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:215:2213], now have 1 active actors on pipe 2026-01-10T12:18:24.685428Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:24.685464Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:18:24.685523Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-10T12:18:24.685572Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-10T12:18:24.685634Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:18:24.685683Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:24.685721Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:24.685764Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:18:24.685795Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:24.685823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:24.685860Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:24.685995Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2026-01-10T12:18:24.687129Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2026-01-10T12:18:24.687204Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:18:24.687247Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2026-01-10T12:18:24.687296Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2026-01-10T12:18:24.687324Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:18:24.687356Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:18:24.687414Z node 1 :PERSQU ... [4][StateInit] bootstrapping 4 [56:194:2142] 2026-01-10T12:19:08.877310Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [56:194:2142] 2026-01-10T12:19:08.887410Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:08.887858Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b721097d-2c1e6834-615b6f6c-83128cb2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:08.905981Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:08.937194Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:08.999622Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.041143Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.125694Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.218750Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.239663Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.322440Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.458074Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.478952Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.665716Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.696748Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.864924Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:09.865397Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7d390bf9-5fad8f32-7bbfba03-ddc6a367_1 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:09.872464Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:09.872873Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5842199-ea5b05f9-6a702e4e-94b10247_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:10.192468Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:107:2057] recipient: [57:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:107:2057] recipient: [57:105:2138] Leader for TabletID 72057594037927937 is [57:111:2142] sender: [57:112:2057] recipient: [57:105:2138] 2026-01-10T12:19:10.249524Z node 57 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:10.249595Z node 57 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:10.249642Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:10.249688Z node 57 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:153:2057] recipient: [57:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:153:2057] recipient: [57:151:2172] Leader for TabletID 72057594037927938 is [57:157:2176] sender: [57:158:2057] recipient: [57:151:2172] Leader for TabletID 72057594037927937 is [57:111:2142] sender: [57:183:2057] recipient: [57:14:2061] 2026-01-10T12:19:10.268266Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:10.270051Z node 57 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 60 actor [57:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 60 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 } Consumers { Name: "user1" Generation: 60 Important: true } 2026-01-10T12:19:10.271035Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [57:189:2142] 2026-01-10T12:19:10.273246Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [57:189:2142] 2026-01-10T12:19:10.275916Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [57:190:2142] 2026-01-10T12:19:10.277975Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [57:190:2142] 2026-01-10T12:19:10.281120Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [57:191:2142] 2026-01-10T12:19:10.283083Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [57:191:2142] 2026-01-10T12:19:10.285217Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [57:192:2142] 2026-01-10T12:19:10.286733Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [57:192:2142] 2026-01-10T12:19:10.288904Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [57:193:2142] 2026-01-10T12:19:10.290575Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [57:193:2142] 2026-01-10T12:19:10.301947Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:10.302246Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4e659249-94639d0f-e74d7652-46bbd13c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:10.328038Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.411809Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.432921Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.527904Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.579544Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.631399Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.724564Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.735245Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.881795Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.964685Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 
selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.080909Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.237983Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.282303Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:11.282660Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|70bd859f-c907fc83-8cc0972f-58b5f090_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:11.288628Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:11.288924Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|103e144e-bbd017be-a5d5a136-a734d1c8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> DstCreator::WithSyncIndex [GOOD] >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2026-01-10T12:15:30.820258Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593705464405995038:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:15:30.820343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:15:31.199592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:15:31.206813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:31.206907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:31.390976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:31.393051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:31.433289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:15:31.720482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:15:31.720506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:15:31.720520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-10T12:15:31.720651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:15:31.780365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:31.792877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:31.842280Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7593705468700963127:2297] 2026-01-10T12:15:31.842505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:31.873613Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:31.876450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:31.876561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:31.878392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:31.878446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:31.878498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:31.878893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:31.878931Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:31.878955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7593705468700963142:2297] in generation 1 2026-01-10T12:15:31.884568Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:31.943366Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:31.943548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:31.943605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7593705468700963144:2298] 2026-01-10T12:15:31.943615Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:31.943627Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:31.943638Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:31.943859Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 
2026-01-10T12:15:31.943951Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:31.943977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:31.943991Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:31.944005Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:31.944043Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:31.945569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705468700963115:2499], serverId# [1:7593705468700963128:2503], sessionId# [0:0:0] 2026-01-10T12:15:31.945703Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:31.946032Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:31.946141Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-10T12:15:31.948035Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:31.954357Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:31.954445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:31.959638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593705468700963159:2520], serverId# [1:7593705468700963160:2521], sessionId# [0:0:0] 2026-01-10T12:15:31.969520Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1768047332002 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047332002 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:31.969574Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:31.970357Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:31.970459Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:31.970475Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:31.970499Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1768047332002:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:31.970820Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 
1768047332002:281474976710657 keys extracted: 0 2026-01-10T12:15:31.970992Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:31.971180Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:31.971238Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:31.977048Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:31.978700Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:31.979864Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1768047332002} 2026-01-10T12:15:31.979907Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:31.979952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1768047332001 2026-01-10T12:15:31.979966Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:31.979994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1768047332009 2026-01-10T12:15:31.981274Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:31.981308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:31.981329Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:31.981377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1768047332002 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7593705464405995332:2142], exec latency: 6 ms, propose latency: 10 ms 2026-01-10T12:15:31.981437Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-10T12:15:31.981475Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:31.982861Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7593705468700963144:2298][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2026-01-1 ... 
topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:19:10.233078Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2026-01-10T12:19:10.233206Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-10T12:19:10.233345Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:10.233438Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-10T12:19:10.233527Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:19:10.233603Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:10.233702Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-10T12:19:10.233830Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-10T12:19:10.233952Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:19:10.234037Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:19:10.234141Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:10.234259Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-10T12:19:10.234564Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-10T12:19:10.235641Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2026-01-10T12:19:10.237146Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2026-01-10T12:19:10.237864Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:19:10.238261Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2026-01-10T12:19:10.239982Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 5 count 1 size 93 actorID [30:1016:2835] 2026-01-10T12:19:10.240248Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2026-01-10T12:19:10.240421Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:19:10.251035Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:492: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-10T12:19:10.251244Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:19:10.251415Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-10T12:19:10.251620Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2026-01-10T12:19:10.252126Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:10.252226Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:10.252297Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:10.252401Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:10.252481Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:10.252593Z node 30 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:19:10.252743Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2026-01-10T12:19:10.253114Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1111:2879] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2026-01-10T12:19:10.253295Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][30:1048:2879] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2026-01-10T12:19:10.253540Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2026-01-10T12:19:10.253630Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2026-01-10T12:19:10.254319Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 
72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-10T12:19:10.364376Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:10.364556Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-10T12:19:10.364938Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2026-01-10T12:19:10.367078Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 10 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2026-01-10T12:19:10.367211Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 10. Send blob request. 2026-01-10T12:19:10.367419Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 5 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-10T12:19:10.367508Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 2 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-10T12:19:10.367570Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-10T12:19:10.367611Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-10T12:19:10.367726Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-10T12:19:10.367791Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-10T12:19:10.367915Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 10. All 6 blobs are from cache. 2026-01-10T12:19:10.368136Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368227Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368279Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368328Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368379Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368428Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:10.368619Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 6 blobs 2026-01-10T12:19:10.369135Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-10T12:19:10.369299Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2026-01-10T12:19:10.369384Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-10T12:19:10.369459Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-10T12:19:10.369537Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-10T12:19:10.369607Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-10T12:19:10.369884Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:18:56.610089Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706346985586530:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:56.610176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:56.780317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:56.801316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:56.801433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:56.807880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:56.858156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:56.859510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706346985586483:2082] 1768047536608292 != 1768047536608295 2026-01-10T12:18:57.007907Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706346985586765:2118] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:57.007972Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706351280554539:2446] HANDLE EvNavigateScheme dc-1 2026-01-10T12:18:57.008136Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706346985586772:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:57.008245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706346985587017:2284][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706346985586772:2120], cookie# 1 2026-01-10T12:18:57.009588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346985587053:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587050:2284], cookie# 1 2026-01-10T12:18:57.009622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346985587054:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587051:2284], cookie# 1 2026-01-10T12:18:57.009658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706346985587055:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587052:2284], cookie# 1 2026-01-10T12:18:57.009689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346985586454:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587054:2284], cookie# 1 2026-01-10T12:18:57.009694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346985586451:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587053:2284], cookie# 1 2026-01-10T12:18:57.009740Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706346985586457:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706346985587055:2284], cookie# 1 2026-01-10T12:18:57.009782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346985587054:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346985586454:2053], cookie# 1 2026-01-10T12:18:57.009802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346985587053:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346985586451:2050], cookie# 1 2026-01-10T12:18:57.009816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706346985587055:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346985586457:2056], cookie# 1 2026-01-10T12:18:57.009880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346985587017:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346985587051:2284], cookie# 1 2026-01-10T12:18:57.009945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706346985587017:2284][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:57.009976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346985587017:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 
Partial: 0 Cluster State: { } }: sender# [1:7593706346985587050:2284], cookie# 1 2026-01-10T12:18:57.010017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706346985587017:2284][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:57.010075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706346985587017:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706346985587052:2284], cookie# 1 2026-01-10T12:18:57.010106Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706346985586772:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:57.010106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706346985587017:2284][/dc-1] Sync cookie mismatch: sender# [1:7593706346985587052:2284], cookie# 1, current cookie# 0 2026-01-10T12:18:57.014366Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706346985586772:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706346985587017:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:57.014462Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706346985586772:2120], cacheItem# { Subscriber: { Subscriber: [1:7593706346985587017:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:18:57.014591Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706346985586772:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:57.014640Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# 
[1:7593706346985586772:2120], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-10T12:18:57.014688Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706346985586772:2120], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-10T12:18:57.014735Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7593706346985586772:2120], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-10T12:18:57.014884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7593706351280554541:2448][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:18:57.015290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346985586451:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706351280554547:2448] 2026-01-10T12:18:57.015308Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346985586451:2050] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:57.015375Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346985586451:2050] Subscribe: subscriber# [1:7593706351280554547:2448], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:57.015415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346985586454:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706351280554548:2448] 2026-01-10T12:18:57.015425Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346985586454:2053] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:57.015462Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346985586454:2053] Subscribe: subscriber# [1:7593706351280554548:2448], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:57.015531Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7593706346985586457:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7593706351280554549:2448] 2026-01-10T12:18:57.015543Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7593706346985586457:2056] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-10T12:18:57.015560Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7593706346985586457:2056] Subscribe: subscriber# [1:7593706351280554549:2448], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:18:57.015611Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7593706 ... 
: 0 } 2026-01-10T12:19:07.948863Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706392719522693:3589], recipient# [3:7593706392719522692:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:08.371085Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:08.371288Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706388424554698:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:08.371438Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706397014490010:3593], recipient# [3:7593706397014490009:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:08.944781Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706375539651368:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:08.944869Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:08.949558Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:08.949700Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706379834620046:3011] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:08.949807Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706397014490016:3596], recipient# [3:7593706397014490015:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.371970Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.372139Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706388424554698:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:09.372238Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706401309457335:3602], recipient# [3:7593706401309457334:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.945384Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.945542Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: 
[3:7593706379834620046:3011] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:09.945665Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706401309457337:3603], recipient# [3:7593706401309457336:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.950580Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:09.950717Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: [3:7593706379834620046:3011] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:09.950812Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706401309457340:3605], recipient# [3:7593706401309457338:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:10.372956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706375539651579:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:10.373100Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706375539651579:2121], cacheItem# { Subscriber: { Subscriber: 
[3:7593706388424554698:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:10.373204Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706405604424655:3608], recipient# [3:7593706405604424654:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2026-01-10T12:19:09.114217Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706404075203189:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:09.114320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:09.296167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:09.296253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:09.344244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:09.348109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:09.348773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:09.349082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706404075203153:2081] 1768047549113062 != 1768047549113065 2026-01-10T12:19:09.521953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:09.544857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:09.544906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:09.544914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-10T12:19:09.545011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:09.768036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:09.777280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:10.081192Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:10.081215Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:10.081707Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:10.119072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:11.636747Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047550038, tx_id: 281474976710658 } } } 2026-01-10T12:19:11.637138Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:11.638812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.640211Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:19:11.640228Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:19:11.665685Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:19:11.704289Z node 1 :REPLICATION_CONTROLLER TRACE: 
dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047551704 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 
72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 Pl ... 
ndexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } 
DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 43 PathOwnerId: 72057594046644480 } 2026-01-10T12:19:11.711820Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 43] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047551704 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 
ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::Basic >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> 
TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2026-01-10T12:19:10.137376Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706407605878130:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:10.137519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:10.340199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:10.373915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:10.374025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:10.402336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:10.451207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:10.453817Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706407605878085:2081] 1768047550135787 != 1768047550135790 2026-01-10T12:19:10.546383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:10.633780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:10.633807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:10.633813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:10.633927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:10.830408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:10.839108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.122517Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: 
[72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:11.122542Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:11.122947Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:11.144791Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:12.713958Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047551102, tx_id: 281474976710658 } } } 2026-01-10T12:19:12.714423Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:12.716738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:12.718419Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:19:12.718435Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:19:12.745721Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:19:12.777411Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047552789 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 43 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: ... ImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 44 PathOwnerId: 72057594046644480 } 2026-01-10T12:19:12.785118Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 44] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047552789 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnsSizeMismatch >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestSingleFailureMirror >> KqpRm::SnapshotSharingByExchanger >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> DstCreator::WithAsyncIndex [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-10T12:18:59.194964Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706358339262153:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:59.195693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:59.393685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:18:59.406636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:59.406748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:59.414395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:59.469027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:59.602443Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593706358339262401:2137] Handle TEvNavigate describe path dc-1 2026-01-10T12:18:59.602496Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593706358339262847:2443] HANDLE EvNavigateScheme dc-1 
2026-01-10T12:18:59.602667Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706358339262408:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:18:59.602787Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593706358339262648:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593706358339262408:2140], cookie# 1 2026-01-10T12:18:59.604281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706358339262681:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262677:2299], cookie# 1 2026-01-10T12:18:59.604315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706358339262683:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262678:2299], cookie# 1 2026-01-10T12:18:59.604325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593706358339262686:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262679:2299], cookie# 1 2026-01-10T12:18:59.604388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706358339262059:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262681:2299], cookie# 1 2026-01-10T12:18:59.604410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706358339262062:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262683:2299], cookie# 1 2026-01-10T12:18:59.604439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593706358339262065:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593706358339262686:2299], cookie# 1 2026-01-10T12:18:59.604508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706358339262681:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262059:2050], cookie# 1 2026-01-10T12:18:59.604535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706358339262683:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262062:2053], cookie# 1 2026-01-10T12:18:59.604557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593706358339262686:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262065:2056], cookie# 1 2026-01-10T12:18:59.604634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706358339262648:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262677:2299], cookie# 1 2026-01-10T12:18:59.604663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593706358339262648:2299][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:18:59.604735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7593706358339262648:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262678:2299], cookie# 1 2026-01-10T12:18:59.604812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593706358339262648:2299][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:18:59.604883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593706358339262648:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593706358339262679:2299], cookie# 1 2026-01-10T12:18:59.604903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593706358339262648:2299][/dc-1] Sync cookie mismatch: sender# [1:7593706358339262679:2299], cookie# 1, current cookie# 0 2026-01-10T12:18:59.604983Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593706358339262408:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:18:59.610951Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593706358339262408:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593706358339262648:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:18:59.611167Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593706358339262408:2140], cacheItem# { Subscriber: { Subscriber: [1:7593706358339262648:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:18:59.614369Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593706358339262848:2444], recipient# [1:7593706358339262847:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:18:59.614467Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593706358339262847:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:18:59.656526Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# 
[1:7593706358339262847:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:18:59.660180Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593706358339262847:2443] Handle TEvDescribeSchemeResult Forward to# [1:7593706358339262846:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:18:59.661633Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593706358339262401:2137] Handle TEvProposeTransaction 2026-01-10T12:18:59.661656Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593706358339262401:2137] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:18:59.661748Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593706358339262401:2137] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593706358339262853:2448] 2026-01-10T12:18:59.692015Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593706358339262408:2140], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId ... 
: 0 } 2026-01-10T12:19:10.846658Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706408449044943:4062], recipient# [3:7593706408449044942:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:11.505372Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:11.505541Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706404154076643:3190] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:11.505676Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706412744012259:4066], recipient# [3:7593706412744012258:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:11.843076Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706391269173132:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:11.843170Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:11.847338Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:11.847480Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706395564141986:3178] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:11.847620Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706412744012263:4069], recipient# [3:7593706412744012262:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.506264Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.506427Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706404154076643:3190] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:12.506540Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706417038979581:4075], recipient# [3:7593706417038979580:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.843690Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.843825Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: 
[3:7593706395564141986:3178] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:12.843944Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706417038979583:4076], recipient# [3:7593706417038979582:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.848289Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:12.848415Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: [3:7593706395564141986:3178] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:12.848495Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706417038979585:4077], recipient# [3:7593706417038979584:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:13.507285Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7593706391269173330:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:19:13.507444Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7593706391269173330:2120], cacheItem# { Subscriber: { Subscriber: 
[3:7593706404154076643:3190] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:19:13.507547Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7593706421333946901:4081], recipient# [3:7593706421333946900:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> TBlobStorageProxyTest::TestVPutVCollectVGetRace |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> DstCreator::EmptyReplicationConfig [GOOD] |91.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2026-01-10T12:19:09.781261Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706401791307531:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:09.781394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:09.958612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:09.963309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:09.963409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:09.965166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:10.055399Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:10.155462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:10.234475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:10.234501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2026-01-10T12:19:10.234514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:10.234621Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:10.462214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:10.469280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:10.557454Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:10.557471Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:10.558042Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:10.787494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:12.136792Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047550556, tx_id: 281474976710658 } } } 2026-01-10T12:19:12.137141Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:12.139376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:12.140384Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:19:12.140427Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:19:12.162222Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] 
Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:19:12.162250Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-10T12:19:12.736872Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706417097703697:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:12.736919Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:12.743955Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:12.837110Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706417097703665:2081] 1768047552735945 != 1768047552735948 2026-01-10T12:19:12.837216Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:12.846403Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:12.853751Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:12.853837Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:12.874545Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:13.070521Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:13.070539Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:13.070544Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:13.070599Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:13.122076Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:13.283572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:13.291380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:13.552601Z node 2 :REPLICATION_CONTROLLER TRACE: 
dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:13.552625Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:13.553148Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:13.741162Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:15.046765Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047553552, tx_id: 281474976715658 } } } 2026-01-10T12:19:15.047179Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:15.048899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.049632Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-10T12:19:15.049657Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-10T12:19:15.073662Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-10T12:19:15.073692Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 41] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2026-01-10T12:19:10.021639Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706406068332781:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:10.021707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:10.212271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:10.244771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:10.244894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:10.289579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:10.326991Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:10.332515Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706406068332748:2081] 1768047550020306 != 1768047550020309 2026-01-10T12:19:10.412922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:10.509279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:10.509312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:10.509322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:10.509528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:10.701212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:10.707616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:10.797961Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok 
Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:10.797982Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:10.798399Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:11.027087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:12.275677Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047550794, tx_id: 281474976710658 } } } 2026-01-10T12:19:12.276136Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:12.278102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:12.278886Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:19:12.278922Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:19:12.303033Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:19:12.303062Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:12.772331Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706414734811696:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:12.772430Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:12.783844Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:12.857427Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:12.858981Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706414734811655:2081] 1768047552771122 != 1768047552771125 2026-01-10T12:19:12.883437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-10T12:19:12.883511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:12.885359Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:12.959635Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:13.006571Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:13.006597Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:13.006603Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:13.006693Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:13.208374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:13.213784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:13.236508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:13.261143Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:13.261168Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:13.261461Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:13.776579Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:15.170473Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047553279, tx_id: 281474976710658 } } } 2026-01-10T12:19:15.170747Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:15.197220Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:15.199695Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047553300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:15.199936Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2026-01-10T12:19:10.735676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706408651290077:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:10.735794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:10.931852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:10.931971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:10.955291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:10.972617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:10.973972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:10.980648Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706408651290030:2081] 1768047550734077 != 1768047550734080 2026-01-10T12:19:11.135665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:11.201866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:11.201888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:11.201913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:11.202031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:11.425789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:11.436534Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:11.436567Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:11.437056Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:11.742611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:13.043456Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-10T12:19:13.043489Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2026-01-10T12:19:13.439779Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706420858617172:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:13.439837Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:13.450811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:13.519537Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706420858617135:2081] 1768047553439127 != 1768047553439130 2026-01-10T12:19:13.535762Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:13.550257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:13.550329Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:13.552062Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:13.721058Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:13.721088Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:13.721095Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:13.721171Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:13.741700Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:13.967906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:13.975801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.061208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.080751Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:14.080768Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:14.081062Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:14.445912Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:15.510036Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047554056, tx_id: 281474976715658 } } } 2026-01-10T12:19:15.510336Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:15.539033Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:15.541995Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1768047554119 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 
0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:15.542253Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2026-01-10T12:19:10.893441Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706408511320409:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:10.893564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:11.051411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:11.051584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:11.085815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:11.119961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:11.122502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:11.337282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:11.337307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:11.337315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:11.337435Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:11.376818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:11.544787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:11.551333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.639447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.664702Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:11.664731Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:11.665141Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:11.900204Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:13.151502Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047551634, tx_id: 281474976710658 } } } 2026-01-10T12:19:13.151897Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:13.195412Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:13.205250Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess 
Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047551704 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:13.205529Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:13.640420Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706421077070684:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:13.640463Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:13.649443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:13.706236Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:13.707956Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706421077070652:2081] 1768047553639696 != 1768047553639699 2026-01-10T12:19:13.752124Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:13.752215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:13.753502Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:13.917255Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:13.917286Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:13.917293Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:13.917386Z node 2 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:13.922278Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:14.115633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:14.121494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.162269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.183222Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:14.183238Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:14.183608Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:14.644433Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:15.900667Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047554203, tx_id: 281474976710658 } } } 2026-01-10T12:19:15.900970Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:15.902452Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:15.903560Z node 2 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047554224 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:15.903831Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:203:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:86:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:89:2057] recipient: [16:88:2118] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:91:2057] recipient: [16:88:2118] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! 
new actor is[16:90:2119] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:206:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:86:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:89:2057] recipient: [17:88:2118] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:91:2057] recipient: [17:88:2118] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:90:2119] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:206:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:87:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:90:2057] recipient: [18:89:2118] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:92:2057] recipient: [18:89:2118] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:91:2119] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:207:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:90:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:93:2057] recipient: [19:92:2121] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:95:2057] recipient: [19:92:2121] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:94:2122] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:210:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:90:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:93:2057] recipient: [20:92:2121] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:95:2057] recipient: [20:92:2121] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:94:2122] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:210:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:91:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:94:2057] recipient: [21:93:2121] Leader for TabletID 72057594037927937 is [21:95:2122] sender: [21:96:2057] recipient: [21:93:2121] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! 
new actor is[21:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TBlobStorageProxyTest::TestEmptyDiscover >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2026-01-10T12:19:12.234189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706415229852305:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:12.234580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:12.392774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:12.392849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:12.438062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:12.442353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:12.478374Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706415229852270:2081] 1768047552232815 != 1768047552232818 2026-01-10T12:19:12.479251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:12.601930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:12.618337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:12.618362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:19:12.618388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:12.618522Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:12.781126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:12.787759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:12.873629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:12.901430Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:12.901447Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:12.901952Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:13.239561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:14.438449Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047552866, tx_id: 281474976710658 } } } 2026-01-10T12:19:14.438720Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:14.481875Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:14.488950Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047552943 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:14.489152Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication mode mismatch: expected: REPLICATION_MODE_READ_ONLY, got: 0 2026-01-10T12:19:14.968830Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706422937589850:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:14.969463Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:15.032662Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:15.049401Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:15.084787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:15.084869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:15.090940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:15.236749Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:15.236769Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:15.236775Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:15.236879Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:15.284489Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:15.450310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:15.456267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.496701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.520706Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:15.520728Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:15.521108Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:15.976240Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:17.324386Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047555519, tx_id: 281474976710658 } } } 2026-01-10T12:19:17.324629Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:17.325851Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:17.326631Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047555561 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:17.326790Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2026-01-10T12:19:14.965081Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003057/r3tmp/tmpvdm6Am//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:14.967071Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:90:2057] recipient: [8:89:2119] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:92:2057] recipient: [8:89:2119] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:91:2120] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:207:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:91:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:94:2057] recipient: [9:93:2123] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:96:2057] recipient: [9:93:2123] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:95:2124] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:211:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:94:2057] recipient: [10:93:2123] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:96:2057] recipient: [10:93:2123] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:95:2124] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:211:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:96:2057] recipient: [11:95:2125] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:98:2057] recipient: [11:95:2125] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:97:2126] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:213:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:96:2057] recipient: [12:95:2125] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:98:2057] recipient: [12:95:2125] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:97:2126] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:213:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:96:2057] recipient: [13:95:2125] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:98:2057] recipient: [13:95:2125] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:97:2126] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:213:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:98:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:101:2057] recipient: [14:100:2129] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:103:2057] recipient: [14:100:2129] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:102:2130] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:218:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:102:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:105:2057] recipient: [15:104:2133] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:107:2057] recipient: [15:104:2133] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:106:2134] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:222:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:102:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:105:2057] recipient: [16:104:2133] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:107:2057] recipient: [16:104:2133] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! 
new actor is[16:106:2134] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:104:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:107:2057] recipient: [17:106:2135] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:109:2057] recipient: [17:106:2135] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:108:2136] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:224:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:104:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:107:2057] recipient: [18:106:2135] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:109:2057] recipient: [18:106:2135] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:108:2136] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> DstCreator::ColumnTypeMismatch [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2026-01-10T12:19:13.344924Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706417915461756:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:13.345411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:13.527826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:13.530146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:13.530222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:13.547562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:13.598740Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706417915461712:2081] 1768047553343298 != 1768047553343301 2026-01-10T12:19:13.599315Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:13.706407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:13.820148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:13.820167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:13.820187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:13.820266Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:14.067433Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:14.074708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.161133Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:14.161159Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:14.161556Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:14.353194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:15.626088Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047554168, tx_id: 281474976715658 } } } 2026-01-10T12:19:15.626434Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:15.628118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.628863Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-10T12:19:15.628879Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-10T12:19:15.649220Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-10T12:19:15.649250Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:16.117682Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706434586515698:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:16.117771Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:16.129771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:16.191698Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706434586515661:2081] 1768047556116302 != 1768047556116305 2026-01-10T12:19:16.192902Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:16.225598Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:16.225663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:16.227971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:16.351962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:16.377277Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:16.377293Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:16.377297Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:16.377352Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:16.570986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:16.577814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:16.609885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:16.641448Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: 
ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:16.641465Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:16.641954Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:17.124637Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:18.585767Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047556646, tx_id: 281474976715658 } } } 2026-01-10T12:19:18.585988Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:18.624996Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:18.627495Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1768047556681 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 
} Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:18.627748Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: 
[DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> KqpRm::SnapshotSharingByExchanger [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TBlobStorageProxyTest::TestProxyLongTailDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2026-01-10T12:18:21.268442Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706196169071674:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:21.285203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:21.355208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:21.388171Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:21.474075Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:18:21.474220Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706197197037380:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:21.474683Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:21.474838Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:21.745155Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:21.756797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:21.856796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:21.857014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:21.862494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:21.862582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:18:21.876314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:21.884621Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:18:21.885584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:21.949140Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:21.960944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:22.019254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:22.296929Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:22.297489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003b3f/r3tmp/yandexUTkonb.tmp 2026-01-10T12:18:22.297506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b3f/r3tmp/yandexUTkonb.tmp 2026-01-10T12:18:22.297644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b3f/r3tmp/yandexUTkonb.tmp 2026-01-10T12:18:22.297723Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:22.323762Z INFO: TTestServer started on Port 17308 GrpcPort 14892 2026-01-10T12:18:22.375052Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:22.438272Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:14892 2026-01-10T12:18:22.548610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:22.596285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:18:22.695783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.140213Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706214376906788:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:25.140320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:25.140653Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706214376906802:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:25.140691Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:25.140777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706214376906800:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:25.150724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:25.174582Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706214376906804:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-10T12:18:25.242281Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706214376906831:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:25.777459Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706213348942322:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:25.777348Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706214376906845:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:25.781470Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzVjNDNiZjctNDRmNGEwYTQtOWNmOTUzN2QtNDI0NDQzNzk=, ActorId: [1:7593706213348942267:2328], ActorState: ExecuteState, LegacyTraceId: 01kekxfvtddjewdt6nzscm3ys6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:25.781476Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTM0ZDhhNDgtNTRlNjkwNGMtNzM0N2Y1YmYtZTI4OGIyYzk=, ActorId: [2:7593706214376906786:2305], ActorState: ExecuteState, LegacyTraceId: 01kekxfvqgehh804cescb9s12r, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:25.785093Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 ... 
ode 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b3f/r3tmp/yandexZoyQJD.tmp 2026-01-10T12:19:08.215735Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b3f/r3tmp/yandexZoyQJD.tmp 2026-01-10T12:19:08.215837Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:08.234680Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:08.264256Z INFO: TTestServer started on Port 29909 GrpcPort 1809 PQClient connected to localhost:1809 2026-01-10T12:19:08.473696Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:08.501297Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:09.005018Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:09.015728Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:11.423017Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593706411541861216:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.423080Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593706411541861208:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.423216Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.423547Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593706411541861223:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.423603Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.426350Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:19:11.431961Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593706411541861259:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.432041Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:11.442821Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593706411541861222:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2026-01-10T12:19:11.462735Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.478379Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7593706409559513130:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:19:11.478721Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=N2RkZTNhM2UtNjAxZDdkOS05YTMxNjZkYS1jMTc1MjE4OQ==, ActorId: [8:7593706409559513106:2305], ActorState: ExecuteState, LegacyTraceId: 01kekxh8yv3d6j12kq9r5gk1p5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:19:11.479039Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:19:11.513716Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7593706411541861468:3163] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:19:11.515864Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:11.545145Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [7:7593706411541861490:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:19:11.545503Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=7&id=YmY0MzZiMDEtODU5YTU1MzktNTExYzNiZDItYzRhYjE4NjY=, ActorId: [7:7593706411541861206:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxh8xy3nhw30czj5fr390g, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:19:11.545850Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:19:11.618047Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [7:7593706411541861770:3389] 2026-01-10T12:19:12.999612Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7593706394361990646:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:12.999711Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:13.011373Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7593706396674610835:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:13.011470Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2026-01-10T12:19:18.203055Z node 7 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [7:7593706441606633185:3587] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-10T12:19:18.203093Z node 7 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [7:7593706441606633185:3587] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> TBlobStorageProxyTest::TestBlockPersistence |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2026-01-10T12:19:15.022933Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:19:15.023503Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af1/r3tmp/tmpC8dbq7/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:19:15.024124Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002af1/r3tmp/tmpC8dbq7/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002af1/r3tmp/tmpC8dbq7/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8971312945304232602 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:19:15.072952Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:19:15.073247Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-10T12:19:15.085566Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:469:2102] with ResourceBroker at [2:439:2101] 2026-01-10T12:19:15.085662Z node 2 :KQP_RESOURCE_MANAGER DEBUG: 
kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:470:2103] 2026-01-10T12:19:15.085696Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:467:2347] with ResourceBroker at [1:438:2328] 2026-01-10T12:19:15.085737Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:468:2348] 2026-01-10T12:19:15.085820Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:19:15.085847Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:19:15.085870Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-10T12:19:15.085922Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-10T12:19:15.086033Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:15.096848Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047555 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:15.097065Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:15.097129Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047555 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:15.097351Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:19:15.097425Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-10T12:19:15.097494Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:19:15.097515Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:15.097595Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047555 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:15.097779Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-10T12:19:15.097806Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:15.097864Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { 
RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047555 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:15.098280Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-10T12:19:15.098348Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:19:15.098623Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:19:15.098892Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-10T12:19:15.099007Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-10T12:19:15.099083Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:19:15.099216Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:19:15.099362Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:19:15.099443Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:19:16.057616Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:19:16.057748Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:19:16.057905Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:19:16.057986Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.058044Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:19:16.058083Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.058128Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:19:16.058456Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:19:16.058560Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:467:2347]) priority=0 resources={0, 100} 2026-01-10T12:19:16.058598Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.058644Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:467:2347]) from queue queue_kqp_resource_manager 2026-01-10T12:19:16.058680Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:467:2347]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.058718Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:467:2347])) 2026-01-10T12:19:16.058811Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:19:16.058890Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:16.059027Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047556 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-10T12:19:16.059361Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:19:16.343738Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:19:16.343917Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [2:469:2102]) priority=0 resources={0, 100} 2026-01-10T12:19:16.343979Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [2:469:2102]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.344038Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:469:2102]) from queue queue_kqp_resource_manager 2026-01-10T12:19:16.344117Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [2:469:2102]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.344169Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:469:2102])) 2026-01-10T12:19:16.344289Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:19:16.344351Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-2-2 (2 by [2:469:2102]) priority=0 resources={0, 100} 2026-01-10T12:19:16.344456Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-2-2 (2 by [2:469:2102]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.344506Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:469:2102]) from queue queue_kqp_resource_manager 2026-01-10T12:19:16.344563Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-2-2 (2 by [2:469:2102]) to queue queue_kqp_resource_manager 2026-01-10T12:19:16.344599Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:469:2102])) 2026-01-10T12:19:16.344673Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-10T12:19:16.344762Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:16.344891Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047557 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-10T12:19:16.345223Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:19:16.638775Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:19:16.638892Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:19:16.638940Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:467:2347])) 2026-01-10T12:19:16.638993Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-10T12:19:16.639044Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-10T12:19:16.639079Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:467:2347]) (release resources {0, 100}) 2026-01-10T12:19:16.639119Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:467:2347])) 2026-01-10T12:19:16.639146Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-10T12:19:16.639188Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:16.639294Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047558 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:16.639635Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-10T12:19:16.922982Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-10T12:19:16.923124Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [2:469:2102]) (release resources {0, 100}) 2026-01-10T12:19:16.923190Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:469:2102])) 2026-01-10T12:19:16.923256Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-10T12:19:16.923311Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-10T12:19:16.923362Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-2-2 (2 by [2:469:2102]) (release resources {0, 100}) 2026-01-10T12:19:16.923410Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:469:2102])) 2026-01-10T12:19:16.923446Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-10T12:19:16.923551Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-10T12:19:16.923751Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1768047559 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-10T12:19:16.924121Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-10T12:19:20.062637Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2026-01-10T12:19:14.303835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706424828246031:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:14.303939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:14.472189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:14.477000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:14.477072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:14.510720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:14.527426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:14.528787Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706424828246002:2081] 1768047554302421 != 1768047554302424 2026-01-10T12:19:14.708318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:14.708341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:14.708347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:14.708438Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:14.752134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:14.921191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:14.928436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.017613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.049666Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:15.049703Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:15.050291Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:15.310928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:16.474048Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047555015, tx_id: 281474976715658 } } } 2026-01-10T12:19:16.474464Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:16.515286Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:16.518997Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1768047555085 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:16.519307Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Columns size mismatch: expected: 2, got: 3 2026-01-10T12:19:17.072811Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706435720822277:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:17.072850Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:17.079926Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:17.134774Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:17.141066Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706435720822240:2081] 1768047557071881 != 1768047557071884 2026-01-10T12:19:17.144802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:17.148177Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:17.148256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:17.176818Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:17.350070Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:17.350091Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:17.350098Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:17.350190Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:17.412191Z 
node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:17.541514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:17.548408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.574669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.601104Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:19:17.601124Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:19:17.601516Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:19:18.111804Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:19.592527Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047557612, tx_id: 281474976710658 } } } 2026-01-10T12:19:19.592780Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:19:19.594126Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-10T12:19:19.595147Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1768047557640 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:19:19.595372Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TBlobStorageProxyTest::TestGetMultipart |91.6%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestBlock |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:18:38.858937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:18:38.859023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:38.859061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:18:38.859092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:18:38.859118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:18:38.859142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:18:38.859185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:38.859233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:18:38.859872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:38.860117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:18:38.950270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:18:38.950391Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:38.951246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:38.963868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:18:38.964330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:18:38.964541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2026-01-10T12:18:38.972280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:18:38.972506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:18:38.973112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:18:38.973376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:18:38.976700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:38.976901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:18:38.978203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:18:38.978256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:38.978424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:18:38.978479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:18:38.978520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:18:38.978665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:18:39.099067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.099787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.099887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.099952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.099996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:39.100552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 44, cookie: 281474976710757 2026-01-10T12:19:20.266129Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-10T12:19:20.266219Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:19:20.266290Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-10T12:19:20.266398Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:19:20.266745Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:19:20.266806Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-10T12:19:20.266955Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-10T12:19:20.266997Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-10T12:19:20.267049Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-10T12:19:20.267090Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
281474976710757 ready parts: 3/3 2026-01-10T12:19:20.267135Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-10T12:19:20.267214Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:1375:3200] message: TxId: 281474976710757 2026-01-10T12:19:20.267272Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-10T12:19:20.267325Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-10T12:19:20.267365Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-10T12:19:20.267505Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:19:20.267553Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-10T12:19:20.267578Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-10T12:19:20.267611Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:19:20.267637Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-01-10T12:19:20.267660Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-10T12:19:20.267716Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:19:20.268171Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:19:20.268227Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:19:20.268315Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:19:20.268369Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:19:20.268409Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:19:20.271007Z node 16 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-10T12:19:20.271186Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2026-01-10T12:19:20.960859Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:20.961090Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 293us result status StatusPathDoesNotExist 2026-01-10T12:19:20.961246Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:19:20.961823Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:20.962041Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 249us result status StatusPathDoesNotExist 2026-01-10T12:19:20.962168Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:19:20.962633Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:20.962844Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/IncrBackupImpl" took 225us result status StatusSuccess 2026-01-10T12:19:20.963222Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestCompactifiedWithRetention >> TBlobStorageProxyTest::TestPartialGetBlock >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 30975, MsgBus: 10904 2026-01-10T12:18:07.556945Z node 1 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706134883714828:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:07.557333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:07.931265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:07.931396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:07.932155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:07.967824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:08.089447Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706134883714792:2081] 1768047487537399 != 1768047487537402 2026-01-10T12:18:08.094250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:08.110398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:08.200677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:18:08.200700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:18:08.200706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:18:08.200814Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:08.648923Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:08.676358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:08.732956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:08.915768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.103404Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:09.172067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.242141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706152063585842:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.242270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.242646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706152063585852:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.242714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.621278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.655294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.691433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.733818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.767909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.807261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.838341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.914537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:11.996501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706152063586728:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.996609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.996725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706152063586733:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.996788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706152063586735:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:11.996854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:12.000769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:12.011990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706152063586737:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:18:12.083730Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706156358554084:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:12.560198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706134883714828:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:12.560266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:18:14.416152Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047494437, txId: 281474976710674] shutting down 2026-01-10T12:18:14.760262Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [s ... : kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047552992, txId: 281474976711268] shutting down 2026-01-10T12:19:13.247755Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047553279, txId: 281474976711271] shutting down Trying to start YDB, gRPC: 22540, MsgBus: 23842 2026-01-10T12:19:14.234106Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706425592065314:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:14.234188Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:14.251616Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:14.314103Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:14.349103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:14.349183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:14.353671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:14.392236Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:14.392265Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:14.392275Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:14.392357Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:14.437506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:14.806120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:14.815818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:14.864028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.002388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.067533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:15.309727Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:17.357889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706438476969034:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.357964Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.358132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706438476969043:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.358169Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.435644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.460036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.483940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.511060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.538740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.564975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.594308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.639616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:17.701752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706438476969911:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.701813Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.701828Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706438476969916:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.701999Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706438476969918:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.702032Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:17.704914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:19:17.712559Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706438476969919:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:19:17.780195Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706438476969971:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:19:19.234348Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706425592065314:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:19.234410Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:20.211426Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047560237, txId: 281474976710673] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] Test command err: 2026-01-10T12:18:33.235251Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:134:2057] recipient: [1:132:2164] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:134:2057] recipient: [1:132:2164] Leader for TabletID 72057594037927937 is [1:138:2168] sender: [1:139:2057] recipient: [1:132:2164] 2026-01-10T12:18:33.314325Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:33.314415Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:33.314486Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:33.314586Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:180:2057] recipient: [1:178:2198] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:180:2057] recipient: [1:178:2198] Leader for TabletID 72057594037927938 is [1:184:2202] sender: [1:185:2057] recipient: [1:178:2198] Leader for TabletID 72057594037927937 is [1:138:2168] sender: [1:210:2057] recipient: [1:14:2061] 2026-01-10T12:18:33.338913Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:33.365676Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:208:2220] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { 
PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:33.366998Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2168] 2026-01-10T12:18:33.370070Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:216:2168] 2026-01-10T12:18:33.372587Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:217:2168] 2026-01-10T12:18:33.374759Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:217:2168] 2026-01-10T12:18:33.385277Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:33.385818Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|be6105e-d6618937-b583a8fc-89ec9965_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:33.394385Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:33.394882Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|cd432ee8-8c06c683-397f4f84-1616c8b7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:33.401735Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:33.402220Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|533fcfbe-73eb2e6a-1e564917-12f3ed10_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 2026-01-10T12:18:33.419401Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.450540Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.481830Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.523492Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.534129Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.596114Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.691235Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.722446Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 
local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.856566Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:33.908425Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:34.074432Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:34.106532Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:34.334966Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": 
"PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", ... let_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.604172Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.749797Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:09.988810Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.123251Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.259324Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.491083Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.739289Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:10.966474Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.278631Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.289298Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.497284Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.724822Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:11.952468Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, 
node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:12.222250Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:12.346947Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:12.493231Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:12.680684Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:12.878553Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:13.129025Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:13.337014Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:13.420287Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:13.649432Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:13.888498Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:14.116763Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:14.376520Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:14.397534Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:14.605726Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:14.845244Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:15.084310Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:15.158670Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][topic] pipe [6:434:2380] connected; active server actors: 1 2026-01-10T12:19:15.484471Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:15.587745Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-10T12:19:15.754639Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:16.024773Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:16.284060Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:16.553964Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:16.720742Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:16.824228Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:17.084400Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:17.333313Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:17.625088Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:17.914758Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:17.966772Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:18.183569Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:18.443727Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:18.735283Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:19.015758Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:19.181777Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:19.276096Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:19.516389Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:19.789464Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:20.060535Z node 6 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:20.279668Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:20.311068Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:20.550016Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:20.789214Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:21.060586Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:21.310685Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:21.394743Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:21.551468Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob |91.6%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestReadRequestInFlightLimit |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TPQTest::TestCompactifiedWithRetention [GOOD] >> TPQTest::TestChangeConfig |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:78:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:81:2057] recipient: [13:80:2112] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:83:2057] recipient: [13:80:2112] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:82:2113] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:198:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:78:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:81:2057] recipient: [14:80:2112] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:83:2057] recipient: [14:80:2112] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:82:2113] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:198:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:79:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:82:2057] recipient: [15:81:2112] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:84:2057] recipient: [15:81:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:83:2113] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:199:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:82:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:85:2057] recipient: [16:84:2115] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:87:2057] recipient: [16:84:2115] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:86:2116] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:202:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:85:2057] recipient: [17:84:2115] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:87:2057] recipient: [17:84:2115] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:86:2116] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:202:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:83:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:86:2057] recipient: [18:85:2115] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:88:2057] recipient: [18:85:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:87:2116] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:88:2057] recipient: [19:87:2117] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:90:2057] recipient: [19:87:2117] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:89:2118] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:205:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:88:2057] recipient: [20:87:2117] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:90:2057] recipient: [20:87:2117] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! 
new actor is[20:89:2118] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:205:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2117] Leader for TabletID 72057594037927937 is [21:90:2118] sender: [21:91:2057] recipient: [21:88:2117] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:84:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2119] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:91:2057] recipient: [9:88:2119] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2120] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2119] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:91:2057] recipient: [10:88:2119] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2120] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2121] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:93:2057] recipient: [11:90:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2122] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2121] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:93:2057] recipient: [12:90:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2122] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:90:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:92:2123] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:95:2057] recipient: [13:92:2123] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:94:2124] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:210:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:90:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:93:2057] recipient: [14:92:2123] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:95:2057] recipient: [14:92:2123] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:94:2124] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:210:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2123] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:96:2057] recipient: [15:93:2123] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2124] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:93:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:96:2057] recipient: [16:95:2125] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:98:2057] recipient: [16:95:2125] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:97:2126] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:213:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:93:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:96:2057] recipient: [17:95:2125] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:98:2057] recipient: [17:95:2125] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:97:2126] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:213:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:94:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:97:2057] recipient: [18:96:2125] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:99:2057] recipient: [18:96:2125] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:98:2126] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:214:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] 
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:85:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:88:2057] recipient: [22:87:2117] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:90:2057] recipient: [22:87:2117] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:89:2118] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:205:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:88:2057] recipient: [23:87:2117] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:90:2057] recipient: [23:87:2117] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:89:2118] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:205:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBlobStorageProxyTest::TestQuadrupleGroups >> Cdc::AddStream [GOOD] >> Cdc::DisableStream >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TPQTest::TestChangeConfig [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: 2026-01-10T12:18:30.215062Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:30.282636Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:30.286404Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:30.286731Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:30.286797Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:30.286863Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:30.286902Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:30.286965Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.287070Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-10T12:18:30.304136Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:183:2196], now have 1 active actors on pipe 2026-01-10T12:18:30.304274Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:30.322907Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:30.325497Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:30.325653Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:30.326408Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:30.326583Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:30.327077Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:30.327458Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-10T12:18:30.329771Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:30.329823Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:30.329859Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-10T12:18:30.329903Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:30.329978Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:30.331517Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:30.332758Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:30.332806Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.332844Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.332884Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.332913Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:30.332943Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.332979Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.333038Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:30.333091Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:30.333122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:30.333153Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-10T12:18:30.333187Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-10T12:18:30.333214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:18:30.333259Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:18:30.333300Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.333532Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:30.333564Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:30.333622Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:30.333818Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:30.333978Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:30.336551Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:30.336640Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:30.336691Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:30.336728Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.336768Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:30.336814Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:30.336845Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:30.336884Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:30.337188Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2204], now have 1 active actors on pipe 2026-01-10T12:18:30.347786Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2206], now have 1 active actors on pipe 2026-01-10T12:18:30.347948Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:30.348002Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topi ... 
4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 45 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 44 } Consumers { Name: "bbb" Generation: 45 Important: true } Consumers { Name: "ccc" Generation: 45 Important: true } 2026-01-10T12:19:25.510592Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:25.511056Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b2ede9ea-5d516f7-a2fa876-9d7369cc_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:25.517798Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:25.518392Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5285ace8-a29ea090-f58f11f5-dd288a7f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:25.531507Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:25.532062Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|cb7962aa-eaad3d22-135444e9-bcac964a_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:25.938652Z node 41 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 41 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:112:2057] recipient: [41:105:2138] 2026-01-10T12:19:25.996568Z node 41 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:25.996612Z node 41 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:25.996654Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:25.996700Z node 41 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927938 is [41:157:2176] sender: 
[41:158:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:183:2057] recipient: [41:14:2061] 2026-01-10T12:19:26.011343Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:26.012945Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 46 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 46 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "aaa" Generation: 46 Important: true } 2026-01-10T12:19:26.013907Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [41:189:2142] 2026-01-10T12:19:26.015829Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [41:189:2142] 2026-01-10T12:19:26.017987Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [41:190:2142] 2026-01-10T12:19:26.019685Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [41:190:2142] 2026-01-10T12:19:26.021479Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [41:191:2142] 2026-01-10T12:19:26.022914Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [41:191:2142] 2026-01-10T12:19:26.024676Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [41:192:2142] 2026-01-10T12:19:26.026116Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [41:192:2142] 2026-01-10T12:19:26.027823Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [41:193:2142] 2026-01-10T12:19:26.029234Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [41:193:2142] 2026-01-10T12:19:26.037443Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:26.037707Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bcbc8e28-cba83570-86ea8886-95f08dbb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2026-01-10T12:19:26.043649Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:26.048857Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][5][StateInit] bootstrapping 5 [41:237:2142] 2026-01-10T12:19:26.050387Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][5][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [41:237:2142] 2026-01-10T12:19:26.053486Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][6][StateInit] bootstrapping 6 [41:238:2142] 2026-01-10T12:19:26.055089Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][6][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [41:238:2142] 2026-01-10T12:19:26.057836Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][7][StateInit] bootstrapping 7 [41:239:2142] 2026-01-10T12:19:26.059241Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][7][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [41:239:2142] 2026-01-10T12:19:26.062387Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][8][StateInit] bootstrapping 8 [41:240:2142] 2026-01-10T12:19:26.063759Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][8][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [41:240:2142] 2026-01-10T12:19:26.066465Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][9][StateInit] bootstrapping 9 [41:241:2142] 2026-01-10T12:19:26.068027Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][9][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [41:241:2142] 2026-01-10T12:19:26.087802Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 47 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "bbb" Generation: 47 Important: true } Consumers { Name: "ccc" Generation: 47 Important: true } 2026-01-10T12:19:26.089374Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:26.089688Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9d653b8f-7a34cb3f-2cb33c21-a09d9ee6_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:26.093885Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:26.094150Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|49b6b54a-bdb666b3-c8bdaa19-4efd7a11_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:26.101847Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:26.102177Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9b74e943-5e3fa924-f3340410-5e2c3134_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestNormal >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:89:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2117] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:108:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:109:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:78:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:81:2057] recipient: [15:80:2112] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:83:2057] recipient: [15:80:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:82:2113] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:198:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:79:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:84:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:83:2113] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:199:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:85:2057] recipient: [18:84:2115] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:87:2057] recipient: [18:84:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:86:2116] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:202:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:83:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:86:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:88:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:87:2116] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:203:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2118] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:91:2057] recipient: [21:88:2118] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2119] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:206:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:87:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:90:2057] recipient: [23:89:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:89:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:207:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds >> SlowTopicAutopartitioning::CDC_Write >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> TPQTestSlow::TestWriteVeryBigMessage |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TFlatTest::Init >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> 
TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2026-01-10T12:19:25.211460Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003043/r3tmp/tmpnEq0Lz//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:25.212379Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003043/r3tmp/tmpnEq0Lz//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-10T12:19:25.221769Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:25.222256Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:28.384687Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003043/r3tmp/tmpe1c4l1//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:28.385095Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003043/r3tmp/tmpe1c4l1//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-10T12:19:28.396447Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:28.396542Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TFlatTest::SplitEmptyAndWrite >> TLocksTest::Range_BrokenLock0 |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TLocksTest::Range_CorrectNullDot >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQReadAhead >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TLocksTest::BrokenSameKeyLock |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TLocksFatTest::PointSetNotBreak >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TFlatTest::LargeDatashardReplyDistributed >> TPQTest::TestManyConsumers [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2026-01-10T12:19:23.630815Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:23.644585Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:25.168106Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-10T12:19:25.181362Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:26.620530Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-10T12:19:26.632198Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:28.275989Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 4 2026-01-10T12:19:28.281040Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:29.825015Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 5 2026-01-10T12:19:29.827894Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:31.452792Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/003054/r3tmp/tmpG19Yon//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 6 2026-01-10T12:19:31.454903Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 
LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TLocksTest::GoodSameKeyLock |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: 2026-01-10T12:18:25.979485Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:26.053452Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:26.057932Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:26.058211Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:26.058263Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:26.058339Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:26.058411Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:26.058471Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.058526Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:26.077240Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-10T12:18:26.077330Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:26.092244Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:26.095637Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:26.095816Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.096664Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:26.096825Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.097229Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:26.097567Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:26.099623Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:26.099671Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:26.099707Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:26.099750Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:26.099806Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:26.101222Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.102194Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:26.102250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:26.102288Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.102321Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.102360Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:26.102406Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.102452Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.102512Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:26.102547Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:26.102573Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.102594Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-10T12:18:26.102613Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-10T12:18:26.102632Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:18:26.102691Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:18:26.103057Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:26.103314Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.103346Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.103405Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:26.103571Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:26.103746Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:26.105967Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:26.106058Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:26.106097Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:26.106127Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:26.106156Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.106195Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:26.106231Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:26.106274Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:26.106543Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:196:2203], now have 1 active actors on pipe 2026-01-10T12:18:26.120156Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:199:2205], now have 1 active actors on pipe 2026-01-10T12:18:26.120288Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:18:26.120354Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1-- ... 
: 72057594037927937] server connected, pipe [28:1000:2990], now have 1 active actors on pipe 2026-01-10T12:19:31.834785Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:31.853434Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:31.923190Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1003:2993], now have 1 active actors on pipe 2026-01-10T12:19:31.925385Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:31.946088Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:31.976202Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1006:2996], now have 1 active actors on pipe 2026-01-10T12:19:31.978212Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.000067Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.033924Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1009:2999], now have 1 active actors on pipe 2026-01-10T12:19:32.036454Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.058969Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.090931Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1012:3002], now have 1 active actors on pipe 2026-01-10T12:19:32.093013Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.113186Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.159988Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1015:3005], now have 1 active actors on pipe 2026-01-10T12:19:32.162126Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.180934Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.211424Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1018:3008], now have 1 active actors on pipe 2026-01-10T12:19:32.213345Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.228696Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.261393Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1021:3011], now have 1 active actors on pipe 2026-01-10T12:19:32.263734Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.283887Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.310449Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1024:3014], now have 1 active actors on pipe 2026-01-10T12:19:32.312642Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.326498Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.356435Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1027:3017], now have 1 active actors on pipe 2026-01-10T12:19:32.358618Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.373525Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.428770Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1030:3020], now have 1 active actors on pipe 2026-01-10T12:19:32.431046Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.452573Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.474902Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1033:3023], now have 1 active actors on pipe 2026-01-10T12:19:32.477097Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.490660Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.517360Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1036:3026], now have 1 active actors on pipe 2026-01-10T12:19:32.519323Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.535332Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:19:32.561511Z node 28 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [28:1039:3029] connected; active server actors: 1 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TFlatTest::WriteSplitKillRead >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> Cdc::DisableStream [GOOD] >> Cdc::AwsRegion >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TLocksTest::Range_IncorrectNullDot1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2026-01-10T12:19:32.525137Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/00301f/r3tmp/tmpdXXpyk//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:32.525733Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/00301f/r3tmp/tmpdXXpyk//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-10T12:19:32.541728Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-10T12:19:32.541985Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TFlatTest::PathSorting >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> TFlatTest::WriteSplitByPartialKeyAndRead >> TFlatTest::CopyTableAndCompareColumnsSchema >> TObjectStorageListingTest::CornerCases >> TFlatTest::SplitBoundaryRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2026-01-10T12:19:33.635014Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/00303d/r3tmp/tmpaJ9oqk//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-10T12:19:33.637655Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TFlatTest::ReadOnlyMode >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-10T12:18:37.595366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:18:37.595477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:37.595532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:18:37.595568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:18:37.595601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:18:37.595631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:18:37.595675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:18:37.595737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:18:37.596400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:37.596624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:18:37.697280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:18:37.697374Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:37.698032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:18:37.710530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:18:37.711016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-01-10T12:18:37.711230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:18:37.719518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:18:37.719731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:18:37.720458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:18:37.720733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:18:37.724013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:37.724226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:18:37.725466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:18:37.725528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:18:37.725716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:18:37.725769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:18:37.725809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:18:37.725949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:18:37.887803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.888782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.888936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:18:37.889946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
83647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "3_continuousBackupImpl" TopicPath: "/MyRoot/Table/3_continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409555 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409556 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 50 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:19:35.872808Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:35.873115Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl1" took 336us result status StatusSuccess 2026-01-10T12:19:35.873749Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl1" PathDescription { Self { Name: "IncrBackupImpl1" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:19:35.874867Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:35.875069Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl2" took 220us result status StatusSuccess 2026-01-10T12:19:35.875464Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl2" PathDescription { Self { Name: "IncrBackupImpl2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:19:35.876498Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:19:35.876697Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl3" took 222us result status StatusSuccess 2026-01-10T12:19:35.877182Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl3" PathDescription { Self { Name: "IncrBackupImpl3" PathId: 49 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: 
false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 49 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TFlatTest::SplitEmptyToMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-10T12:19:28.479147Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:19:28.556997Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:28.557104Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:28.557190Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:28.557268Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:19:28.590929Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe 
[1:182:2195], now have 1 active actors on pipe 2026-01-10T12:19:28.591051Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:19:28.618668Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-10T12:19:28.618919Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:28.620590Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-10T12:19:28.620756Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:19:28.620830Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:19:28.621417Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:19:28.621841Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:19:28.624683Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:19:28.624748Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-10T12:19:28.624797Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:19:28.624864Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:19:28.626577Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:19:28.628051Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:19:28.628130Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:28.628170Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:19:28.628236Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:19:28.628274Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:28.628351Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:19:28.628430Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:19:28.628471Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:19:28.628505Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:19:28.628542Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:19:28.628608Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:19:28.628898Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:19:28.628942Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-10T12:19:28.629003Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:19:28.629213Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:19:28.629488Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:19:28.631481Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:19:28.631529Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-10T12:19:28.631566Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:19:28.631608Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:19:28.632799Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:19:28.633891Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-10T12:19:28.633939Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:19:28.633971Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:19:28.634012Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:19:28.634043Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:28.634082Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:19:28.634138Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-10T12:19:28.634184Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:19:28.634215Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:19:28.634242Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-10T12:19:28.634274Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-10T12:19:28.634464Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:19:28.634501Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-10T12:19:28.634563Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:19:28.634811Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:19:28.635042Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:19:28.635249Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:19:28.635368Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-10T12:19:37.479826Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.479862Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.479893Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.479924Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.479961Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.479994Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.480026Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.480059Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.480109Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-10T12:19:37.480248Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-10T12:19:37.480299Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-10T12:19:37.480342Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:0 isTruncatedBlob 1 2026-01-10T12:19:37.500405Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:0 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-10T12:19:37.500522Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 3:16 2026-01-10T12:19:37.500876Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: 
[72057594037927937][Partition][0][StateIdle] read cookie 28 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 3 partno 16 count 4294967295 size 4294967295 endOffset 4 max time lag 0ms effective offset 3 2026-01-10T12:19:37.501307Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 28 added 2 blobs, size 12781161 count 1 last offset 3, current partition end offset: 4 2026-01-10T12:19:37.501350Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 28. Send blob request. 2026-01-10T12:19:37.501443Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 16 count 0 parts_count 16 source 0 size 8191635 accessed 1 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:37.501495Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 32 count 1 parts_count 8 source 1 size 4589526 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:37.501543Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 28. All 2 blobs are from cache. 2026-01-10T12:19:37.501627Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2026-01-10T12:19:37.501674Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2026-01-10T12:19:37.501746Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-10T12:19:37.501888Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-10T12:19:37.502036Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502071Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502102Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502135Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502168Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502200Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502230Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502262Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-10T12:19:37.502293Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-10T12:19:37.502403Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 
0: Got internal ProxyResponse 2026-01-10T12:19:37.502436Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-10T12:19:37.502475Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:16 isTruncatedBlob 1 2026-01-10T12:19:37.516840Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:16 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 1 2026-01-10T12:19:37.542993Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00032_0000000001_00015 2026-01-10T12:19:37.543142Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-10T12:19:37.543396Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-10T12:19:37.543459Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:19:37.543586Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 0 size 271 2026-01-10T12:19:37.543678Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 32 count 1 size 187 2026-01-10T12:19:37.543760Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 0 size 8191590 2026-01-10T12:19:37.543800Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000002_00016_0000000000_00016(+) to d0000000000_00000000000000000002_00016_0000000000_00016(+) 2026-01-10T12:19:37.543832Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00032_0000000001_00015(+) to d0000000000_00000000000000000000_00032_0000000001_00015(+) 2026-01-10T12:19:37.564805Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 0 size 271 actorID [3:139:2142] 2026-01-10T12:19:37.564878Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 2 count 1 size 4589526 actorID [3:139:2142] is actual 1 2026-01-10T12:19:37.564951Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 187 actorID [3:139:2142] 2026-01-10T12:19:37.564982Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 0 size 8191635 actorID [3:139:2142] is actual 1 2026-01-10T12:19:37.565037Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 0 size 8191590 actorID [3:139:2142] 2026-01-10T12:19:37.565115Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 4589526 2026-01-10T12:19:37.566549Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191635 2026-01-10T12:19:37.573987Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 0 parts 16 suffix '0' size 271 2026-01-10T12:19:37.574069Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 187 2026-01-10T12:19:37.574103Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191590 2026-01-10T12:19:37.574536Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:19:37.574575Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-10T12:19:37.574606Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-10T12:19:37.583239Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:402:2377], now have 1 active actors on pipe 2026-01-10T12:19:37.583364Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:19:37.583401Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:19:37.583508Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 3 for user __ydb_compaction_consumer 2026-01-10T12:19:37.583836Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:404:2379], now have 1 active actors on pipe Got start offset = 2 |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2026-01-10T12:19:31.114093Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706497145939155:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:31.114192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:31.323679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:31.342101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:31.342193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-10T12:19:31.393951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:31.438419Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706497145939127:2081] 1768047571112694 != 1768047571112697 2026-01-10T12:19:31.446858Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:31.477408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:31.615299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:31.691851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:31.822667Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:31.826743Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:31.849838Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:31.853626Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:31.945821Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:31.946062Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:31.946226Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:31.946386Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:31.947298Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2026-01-10T12:19:31.950353Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.32, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-10T12:19:32.043233Z node 1 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:19:32.043292Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:19:32.049768Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-10T12:19:32.049803Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:19:32.049815Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-10T12:19:34.361435Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706508259084376:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.361500Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:34.368028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:34.431460Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:34.432524Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706508259084349:2081] 1768047574360823 != 1768047574360826 2026-01-10T12:19:34.442654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:34.442723Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:34.462975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:34.466089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.588010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:34.607145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.666952Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:34.670373Z node 2 :OPS_COMPACT INFO: 
Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:34.688099Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:34.691332Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:34.710298Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 2026-01-10T12:19:34.817982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } } } TxId: 281474976710678 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:34.818140Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710678:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-10T12:19:34.818278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:34.818331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:34.818435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-10T12:19:34.818458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710678:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-10T12:19:34.818588Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710678:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000g+\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000g+\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 4 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-10T12:19:34.818616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710678:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 
2026-01-10T12:19:34.819169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710678, response: Status: StatusAccepted TxId: 281474976710678 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:19:34.819268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE ... msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2026-01-10T12:19:34.823854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2026-01-10T12:19:34.823918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037890 2026-01-10T12:19:34.823932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037891 2026-01-10T12:19:34.843510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037890 2026-01-10T12:19:34.843572Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037890 2026-01-10T12:19:34.843895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.845257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037891 2026-01-10T12:19:34.845298Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037891 2026-01-10T12:19:34.845319Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710678:0 3 -> 131 2026-01-10T12:19:34.845590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.845693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.845721Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:19:34.845748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 281474976710678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976710678:0 at tablet 72057594046644480 
2026-01-10T12:19:34.845974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2026-01-10T12:19:34.846043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037888 2026-01-10T12:19:34.848230Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:34.848284Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:34.848471Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:34.848546Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:34.848616Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-10T12:19:34.852364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-10T12:19:34.852430Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-10T12:19:34.852704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710678:0 131 -> 132 2026-01-10T12:19:34.852805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-10T12:19:34.853091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.853211Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:19:34.853233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710678, path id: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:34.853422Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:19:34.853449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7593706508259084865:2246], at schemeshard: 72057594046644480, txId: 281474976710678, path id: 39 2026-01-10T12:19:34.853489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.853510Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:19:34.853533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976710678 at tablet 72057594046644480 2026-01-10T12:19:34.855322Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2026-01-10T12:19:34.855419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2026-01-10T12:19:34.855433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710678 2026-01-10T12:19:34.855449Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710678, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], version: 4 2026-01-10T12:19:34.855469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-01-10T12:19:34.855529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2026-01-10T12:19:34.855645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2026-01-10T12:19:34.855762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710678 2026-01-10T12:19:34.857300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2026-01-10T12:19:34.857333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-10T12:19:34.857376Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710678:0 progress is 1/1 2026-01-10T12:19:34.857387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-10T12:19:34.857404Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710678:0 progress is 1/1 2026-01-10T12:19:34.857426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-10T12:19:34.857440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, 
TxId: 281474976710678, ready parts: 1/1, is published: true 2026-01-10T12:19:34.857473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7593706508259085488:2347] message: TxId: 281474976710678 2026-01-10T12:19:34.857496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2026-01-10T12:19:34.857511Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710678:0 2026-01-10T12:19:34.857520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710678:0 2026-01-10T12:19:34.857637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-10T12:19:34.857917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2026-01-10T12:19:34.857934Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710678:0 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksFatTest::RangeSetRemove |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] >> TLocksTest::Range_GoodLock0 >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] ... r refreshed! new actor is[22:86:2116] Leader for TabletID 72057594037927937 is [22:86:2116] sender: [22:202:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:82:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:84:2115] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:87:2057] recipient: [23:84:2115] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:86:2116] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:202:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:83:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:85:2115] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:88:2057] recipient: [24:85:2115] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:87:2116] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:203:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:86:2057] recipient: [25:39:2086] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:89:2057] recipient: [25:88:2118] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:91:2057] recipient: [25:88:2118] !Reboot 72057594037927937 (actor [25:58:2099]) rebooted! !Reboot 72057594037927937 (actor [25:58:2099]) tablet resolver refreshed! new actor is[25:90:2119] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:206:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:59:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:76:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:86:2057] recipient: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:89:2057] recipient: [26:88:2118] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:91:2057] recipient: [26:88:2118] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! 
new actor is[26:90:2119] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:206:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:87:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:90:2057] recipient: [27:89:2118] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:92:2057] recipient: [27:89:2118] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! new actor is[27:91:2119] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:207:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:90:2057] recipient: [28:39:2086] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:93:2057] recipient: [28:92:2121] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:95:2057] recipient: [28:92:2121] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:94:2122] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:210:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:90:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:93:2057] recipient: [29:92:2121] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:95:2057] recipient: [29:92:2121] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! 
new actor is[29:94:2122] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:210:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:91:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:94:2057] recipient: [30:93:2121] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:96:2057] recipient: [30:93:2121] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:95:2122] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:211:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:93:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:96:2057] recipient: [31:95:2123] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:98:2057] recipient: [31:95:2123] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:97:2124] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:213:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:93:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! 
new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:214:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:52:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:52:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:54:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:203:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:86:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:89:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:91:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:90:2119] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:206:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:52:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:87:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:90:2057] recipient: [24:89:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:89:2118] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:207:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TFlatTest::WriteSplitWriteSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-10T12:18:55.440482Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:55:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:61:2057] recipient: [3:55:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:78:2057] recipient: [3:17:2064] 2026-01-10T12:18:55.745347Z node 4 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:61:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:78:2057] recipient: [4:17:2064] !Reboot 72057594037927937 (actor [4:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:80:2057] recipient: [4:42:2089] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:83:2057] recipient: [4:82:2114] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:85:2057] recipient: [4:82:2114] !Reboot 72057594037927937 (actor [4:60:2101]) rebooted! !Reboot 72057594037927937 (actor [4:60:2101]) tablet resolver refreshed! 
new actor is[4:84:2115] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:200:2057] recipient: [4:17:2064] 2026-01-10T12:18:57.620526Z node 5 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:54:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:61:2057] recipient: [5:54:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:78:2057] recipient: [5:17:2064] !Reboot 72057594037927937 (actor [5:60:2101]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:80:2057] recipient: [5:42:2089] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:83:2057] recipient: [5:82:2114] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:85:2057] recipient: [5:82:2114] !Reboot 72057594037927937 (actor [5:60:2101]) rebooted! !Reboot 72057594037927937 (actor [5:60:2101]) tablet resolver refreshed! new actor is[5:84:2115] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:200:2057] recipient: [5:17:2064] 2026-01-10T12:18:59.572204Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:61:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:78:2057] recipient: [6:17:2064] !Reboot 72057594037927937 (actor [6:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:81:2057] recipient: [6:42:2089] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:84:2057] recipient: [6:83:2114] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:86:2057] recipient: [6:83:2114] !Reboot 72057594037927937 (actor [6:60:2101]) rebooted! !Reboot 72057594037927937 (actor [6:60:2101]) tablet resolver refreshed! new actor is[6:85:2115] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:201:2057] recipient: [6:17:2064] 2026-01-10T12:19:01.449887Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:54:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:61:2057] recipient: [7:54:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:78:2057] recipient: [7:17:2064] !Reboot 72057594037927937 (actor [7:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:84:2057] recipient: [7:42:2089] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:60:2101]) rebooted! !Reboot 72057594037927937 (actor [7:60:2101]) tablet resolver refreshed! 
new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:17:2064] 2026-01-10T12:19:03.529814Z node 8 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:61:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:78:2057] recipient: [8:17:2064] !Reboot 72057594037927937 (actor [8:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:84:2057] recipient: [8:42:2089] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:60:2101]) rebooted! !Reboot 72057594037927937 (actor [8:60:2101]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:17:2064] 2026-01-10T12:19:05.634848Z node 9 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:56:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:61:2057] recipient: [9:56:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:78:2057] recipient: [9:17:2064] !Reboot 72057594037927937 (actor [9:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:85:2057] recipient: [9:42:2089] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:60:2101]) rebooted! !Reboot 72057594037927937 (actor [9:60:2101]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:17:2064] 2026-01-10T12:19:07.503618Z node 10 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:61:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:78:2057] recipient: [10:17:2064] !Reboot 72057594037927937 (actor [10:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:88:2057] recipient: [10:42:2089] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:60:2101]) rebooted! 
!Reboot 72057594037927937 (actor [10:60:2101]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:17:2064] 2026-01-10T12:19:09.385599Z node 11 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:61:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:78:2057] recipient: [11:17:2064] !Reboot 72057594037927937 (actor [11:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:88:2057] recipient: [11:42:2089] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:60:2101]) rebooted! !Reboot 72057594037927937 (actor [11:60:2101]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:17:2064] 2026-01-10T12:19:11.274425Z node 12 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:54:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:61:2057] recipient: [12:54:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:78:2057] recipient: [12:17:2064] !Reboot 72057594037927937 (actor [12:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:89:2057] recipient: [12:42:2089] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:60:2101]) rebooted! !Reboot 72057594037927937 (actor [12:60:2101]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:212 ... pient: [18:42:2089] Leader for TabletID 72057594037927937 is [18:60:2101] sender: [18:100:2057] recipient: [18:99:2126] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:102:2057] recipient: [18:99:2126] !Reboot 72057594037927937 (actor [18:60:2101]) rebooted! !Reboot 72057594037927937 (actor [18:60:2101]) tablet resolver refreshed! 
new actor is[18:101:2127] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:217:2057] recipient: [18:17:2064] 2026-01-10T12:19:24.903570Z node 19 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:54:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:61:2057] recipient: [19:54:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:78:2057] recipient: [19:17:2064] !Reboot 72057594037927937 (actor [19:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:100:2057] recipient: [19:42:2089] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:103:2057] recipient: [19:102:2129] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:105:2057] recipient: [19:102:2129] !Reboot 72057594037927937 (actor [19:60:2101]) rebooted! !Reboot 72057594037927937 (actor [19:60:2101]) tablet resolver refreshed! new actor is[19:104:2130] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:220:2057] recipient: [19:17:2064] 2026-01-10T12:19:27.014690Z node 20 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:61:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:78:2057] recipient: [20:17:2064] !Reboot 72057594037927937 (actor [20:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:100:2057] recipient: [20:42:2089] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:103:2057] recipient: [20:102:2129] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:105:2057] recipient: [20:102:2129] !Reboot 72057594037927937 (actor [20:60:2101]) rebooted! !Reboot 72057594037927937 (actor [20:60:2101]) tablet resolver refreshed! new actor is[20:104:2130] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:220:2057] recipient: [20:17:2064] 2026-01-10T12:19:29.115673Z node 21 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:56:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:61:2057] recipient: [21:56:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:78:2057] recipient: [21:17:2064] !Reboot 72057594037927937 (actor [21:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:101:2057] recipient: [21:42:2089] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:104:2057] recipient: [21:103:2129] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:106:2057] recipient: [21:103:2129] !Reboot 72057594037927937 (actor [21:60:2101]) rebooted! !Reboot 72057594037927937 (actor [21:60:2101]) tablet resolver refreshed! new actor is[21:105:2130] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:221:2057] recipient: [21:17:2064] 2026-01-10T12:19:31.199918Z node 22 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:61:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:78:2057] recipient: [22:17:2064] !Reboot 72057594037927937 (actor [22:60:2101]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:102:2057] recipient: [22:42:2089] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:105:2057] recipient: [22:104:2130] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:107:2057] recipient: [22:104:2130] !Reboot 72057594037927937 (actor [22:60:2101]) rebooted! !Reboot 72057594037927937 (actor [22:60:2101]) tablet resolver refreshed! new actor is[22:106:2131] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:126:2057] recipient: [22:17:2064] 2026-01-10T12:19:31.513988Z node 23 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:61:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:78:2057] recipient: [23:17:2064] !Reboot 72057594037927937 (actor [23:60:2101]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:103:2057] recipient: [23:42:2089] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:106:2057] recipient: [23:105:2131] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:108:2057] recipient: [23:105:2131] !Reboot 72057594037927937 (actor [23:60:2101]) rebooted! !Reboot 72057594037927937 (actor [23:60:2101]) tablet resolver refreshed! 
new actor is[23:107:2132] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:127:2057] recipient: [23:17:2064] 2026-01-10T12:19:31.812859Z node 24 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:54:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:61:2057] recipient: [24:54:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:78:2057] recipient: [24:17:2064] !Reboot 72057594037927937 (actor [24:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:106:2057] recipient: [24:42:2089] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:109:2057] recipient: [24:108:2134] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:111:2057] recipient: [24:108:2134] !Reboot 72057594037927937 (actor [24:60:2101]) rebooted! !Reboot 72057594037927937 (actor [24:60:2101]) tablet resolver refreshed! new actor is[24:110:2135] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:226:2057] recipient: [24:17:2064] 2026-01-10T12:19:33.730570Z node 25 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:61:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:78:2057] recipient: [25:17:2064] !Reboot 72057594037927937 (actor [25:60:2101]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:106:2057] recipient: [25:42:2089] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:109:2057] recipient: [25:108:2134] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:111:2057] recipient: [25:108:2134] !Reboot 72057594037927937 (actor [25:60:2101]) rebooted! !Reboot 72057594037927937 (actor [25:60:2101]) tablet resolver refreshed! new actor is[25:110:2135] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:226:2057] recipient: [25:17:2064] 2026-01-10T12:19:35.626623Z node 26 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:61:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:78:2057] recipient: [26:17:2064] !Reboot 72057594037927937 (actor [26:60:2101]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! 
Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:106:2057] recipient: [26:42:2089] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:109:2057] recipient: [26:108:2134] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:111:2057] recipient: [26:108:2134] !Reboot 72057594037927937 (actor [26:60:2101]) rebooted! !Reboot 72057594037927937 (actor [26:60:2101]) tablet resolver refreshed! new actor is[26:110:2135] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:226:2057] recipient: [26:17:2064] 2026-01-10T12:19:37.716265Z node 27 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:61:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:78:2057] recipient: [27:17:2064] !Reboot 72057594037927937 (actor [27:60:2101]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:111:2057] recipient: [27:42:2089] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:114:2057] recipient: [27:113:2138] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:116:2057] recipient: [27:113:2138] !Reboot 72057594037927937 (actor [27:60:2101]) rebooted! !Reboot 72057594037927937 (actor [27:60:2101]) tablet resolver refreshed! new actor is[27:115:2139] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:231:2057] recipient: [27:17:2064] 2026-01-10T12:19:39.815818Z node 28 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:61:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:78:2057] recipient: [28:17:2064] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TFlatTest::CopyCopiedTableAndRead >> TFlatTest::CopyTableAndRead >> TFlatTest::AutoSplitBySize >> TFlatTest::PartBloomFilter [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TFlatTest::LargeDatashardReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2026-01-10T12:19:35.001763Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706515663592447:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:35.001809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:35.173321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:35.185680Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:35.185795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:35.246387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:35.265904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706511368625122:2081] 1768047575000013 != 1768047575000016 2026-01-10T12:19:35.267584Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:35.377602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:35.413978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:35.480214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:35.630913Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:35.634530Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:35.655728Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:35.659553Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:35.685651Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-10T12:19:35.852347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976715680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:35.852646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 
281474976715680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-10T12:19:35.852905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:35.852936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:35.852949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-10T12:19:35.853251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-10T12:19:35.853301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715680:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-10T12:19:35.853542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-10T12:19:35.853600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:35.854508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715680, response: Status: StatusAccepted TxId: 281474976715680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:19:35.854629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-01-10T12:19:35.854799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715680:0, at schemeshard: 72057594046644480 2026-01-10T12:19:35.854831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-10T12:19:35.855101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: 
TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:35.855192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:35.855324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:35.855645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-10T12:19:35.855717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-10T12:19:35.855771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2026-01-10T12:19:35.855836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715680, partId: 0, tablet: 72057594037968897 2026-01-10T12:19:35.855856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:3, partId: 0 2026-01-10T12:19:35.855866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:4, partId: 0 2026-01-10T12:19:35.855873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:5, partId: 0 2026-01-10T12:19:35.855990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715680, at schemeshard: 72057594046644480 2026-01-10T12:19:35.856007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715680, ready parts: 0/1, is published: true 2026-01-10T12:19:35.856019Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715680, at schemeshard: 72057594046644480 2026-01-10T12:19:35.857572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-10T12:19:35.857600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation Fi ... WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:19:38.909584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757702 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2026-01-10T12:19:38.909660Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.909929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.909979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.911202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:38.911425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-10T12:19:38.911773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:38.911794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:38.912060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757703 RawX2: 4503608217307444 } TabletId: 72075186224037892 State: 4 2026-01-10T12:19:38.912108Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.912246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757357 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2026-01-10T12:19:38.912265Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.912377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757899 
RawX2: 4503608217307470 } TabletId: 72075186224037893 State: 4 2026-01-10T12:19:38.912395Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.912438Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-10T12:19:38.912481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757695 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2026-01-10T12:19:38.912497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.912655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.912679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.912727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.912737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.912772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.912782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.912813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.912823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.913824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:19:38.914015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-10T12:19:38.914160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:38.914285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:38.914382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-10T12:19:38.914489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:38.914636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:38.914765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:38.916131Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:19:38.916159Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-01-10T12:19:38.916173Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:19:38.916186Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-10T12:19:38.916375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:19:38.916391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:19:38.916620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:19:38.916630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:38.916653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-10T12:19:38.916660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-10T12:19:38.916676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:38.916690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:38.919317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706529424757900 RawX2: 4503608217307471 } TabletId: 72075186224037894 State: 4 2026-01-10T12:19:38.919354Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:38.919613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:38.919640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: 
Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:38.921417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-10T12:19:38.921605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:38.921772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:38.921796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:38.921834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:38.922507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-10T12:19:38.922549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-10T12:19:38.922587Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:38.923297Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TFlatTest::SelectBigRangePerf >> TFlatTest::CrossRW ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2026-01-10T12:19:36.096875Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706517805147368:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:36.096972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:36.292180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.322785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:36.322920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:36.350527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:36.408266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706517805147343:2081] 1768047576096116 != 
1768047576096119 2026-01-10T12:19:36.408661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:36.452200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.581016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:38.890070Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706525490029354:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.891266Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.903266Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.980058Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:38.981580Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706525490029327:2081] 1768047578888549 != 1768047578888552 2026-01-10T12:19:39.016704Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:39.016775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:39.019880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:39.150166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:39.191899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.195200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:39.524439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2026-01-10T12:19:30.082884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706493089144051:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:30.083062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:30.283586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:30.296238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:30.296362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:30.361665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:30.364637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:30.366881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706493089144027:2081] 1768047570081752 != 1768047570081755 2026-01-10T12:19:30.410281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710670:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.410329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710670:0 ProgressState 2026-01-10T12:19:30.410381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710670 ready parts: 1/1 2026-01-10T12:19:30.421386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710670 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.421816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.421846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710665:0 ProgressState 2026-01-10T12:19:30.421902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710665 ready parts: 1/1 2026-01-10T12:19:30.421996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 
0 TxId: 281474976710665 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.422142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710669:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.422161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710669:0 ProgressState 2026-01-10T12:19:30.422193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710669 ready parts: 1/1 2026-01-10T12:19:30.422239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710669 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.422398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.422409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710664:0 ProgressState 2026-01-10T12:19:30.422423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710664 ready parts: 1/1 2026-01-10T12:19:30.422482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710664 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.422628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.422648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710668:0 ProgressState 2026-01-10T12:19:30.422670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710668 ready parts: 1/1 2026-01-10T12:19:30.422724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710668 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.422874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710663:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.422887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710663:0 ProgressState 2026-01-10T12:19:30.422901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710663 ready parts: 1/1 2026-01-10T12:19:30.422954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710663 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.423100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.423110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710667:0 ProgressState 2026-01-10T12:19:30.423152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710667 ready parts: 1/1 2026-01-10T12:19:30.423193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710667 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.423420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710662:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.423433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710662:0 ProgressState 2026-01-10T12:19:30.423449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710662 ready parts: 1/1 2026-01-10T12:19:30.423551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710662 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.423724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710661:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.423744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710661:0 ProgressState 2026-01-10T12:19:30.423763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710661 ready parts: 1/1 2026-01-10T12:19:30.423816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710661 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.423971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.423984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710660:0 ProgressState 2026-01-10T12:19:30.424002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 
281474976710660 ready parts: 1/1 2026-01-10T12:19:30.424121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710660 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.424374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.424386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710659:0 ProgressState 2026-01-10T12:19:30.424417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710659 ready parts: 1/1 2026-01-10T12:19:30.424484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710659 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:30.424689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-10T12:19:30.424701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:30: [72057594046644480] TCreateSysView::TPropose, opId: 281474976710658:0 ProgressState 2026- ... 61Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2026-01-10T12:19:31.535084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715674:0 progress is 1/1 2026-01-10T12:19:31.535099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2026-01-10T12:19:31.535110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715674, ready parts: 1/1, is published: true 2026-01-10T12:19:31.535195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7593706497384113101:2360] message: TxId: 281474976715674 2026-01-10T12:19:31.535230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2026-01-10T12:19:31.535261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715674:0 2026-01-10T12:19:31.535272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715674:0 2026-01-10T12:19:31.535343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 2 2026-01-10T12:19:31.535609Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 
2026-01-10T12:19:31.535656Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2026-01-10T12:19:31.536839Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:31.537085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706497384112688 RawX2: 4503603922340115 } TabletId: 72075186224037899 State: 4 2026-01-10T12:19:31.537128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:31.537371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:31.537371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2026-01-10T12:19:31.537418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:31.537571Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 0 TabletID: 72075186224037899 2026-01-10T12:19:31.537594Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2026-01-10T12:19:31.537679Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2026-01-10T12:19:31.537781Z node 1 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2026-01-10T12:19:31.537891Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2026-01-10T12:19:31.539168Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7593706493089144271:2103] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7593706493089144379:2148] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-10T12:19:31.539290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2026-01-10T12:19:31.539383Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037899 reason = ReasonStop 2026-01-10T12:19:31.539495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 1 2026-01-10T12:19:31.539565Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 
THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2026-01-10T12:19:31.539606Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2026-01-10T12:19:31.539612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:31.539629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 42], at schemeshard: 72057594046644480 2026-01-10T12:19:31.539657Z node 1 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2026-01-10T12:19:31.539662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:31.540153Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2026-01-10T12:19:31.540357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:12 2026-01-10T12:19:31.540380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2026-01-10T12:19:31.540443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:31.540482Z node 1 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2026-01-10T12:19:31.540523Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-10T12:19:31.540762Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037899 2026-01-10T12:19:31.540808Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037899 2026-01-10T12:19:31.542184Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2026-01-10T12:19:33.307636Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706507274371565:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:33.307741Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:33.319009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:33.426034Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:33.427639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:33.427713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-10T12:19:33.427916Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706507274371538:2081] 1768047573306876 != 1768047573306879 2026-01-10T12:19:33.445571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:33.592296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:33.614063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:33.633638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.313394Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:38.308029Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706507274371565:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.308116Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:41.747905Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-10T12:19:41.758817Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-10T12:19:41.759893Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-10T12:19:41.775734Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7593706537339149232:6100] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock2 >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TFlatTest::RejectByIncomingReadSetSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2026-01-10T12:18:24.896564Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706207489699059:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.896638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:24.928151Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706208434937441:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.929065Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:24.972622Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:24.992261Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:25.214296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:25.222319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:25.255872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:25.255982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:25.260739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:25.260832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:25.302513Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:18:25.302695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:25.305202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:25.394698Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:25.493244Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:18:25.504015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:25.509052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexdnQTfB.tmp 2026-01-10T12:18:25.509117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexdnQTfB.tmp 2026-01-10T12:18:25.509331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexdnQTfB.tmp 2026-01-10T12:18:25.509435Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:25.551256Z INFO: TTestServer started on Port 61696 GrpcPort 30620 PQClient connected to localhost:30620 2026-01-10T12:18:25.745799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:25.850163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.898539Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:25.974899Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:28.441879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706224669569527:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.442074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.442206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706224669569535:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.442672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706224669569541:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.442808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:18:28.445621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:18:28.473164Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706224669569544:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:18:28.699133Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706224669569633:3062] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:18:28.728309Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706224669569643:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:28.728768Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTc1NGZmNmQtZjkzNDljZjItZGIwZGRkNzUtMmI2ZjA4MGE=, ActorId: [1:7593706224669569523:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxfyyr75pk8znmf71m1a1n, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:28.729393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:28.731363Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:18:28.731530Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706225614807037:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:28.731934Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NGE2NzA3NDUtMmQyN2EwNGMtMTYxY2JiYjMtMThjYWQ0YTU=, ActorId: [2:7593706225614807013:2305], ActorState: ExecuteState, LegacyTraceId: 01kekxfyz90kz2c3j6r8qrt6sy, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:28.732369Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' be ... 97 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:29.790811Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexRDUiEW.tmp 2026-01-10T12:19:29.790842Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexRDUiEW.tmp 2026-01-10T12:19:29.791013Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b28/r3tmp/yandexRDUiEW.tmp 2026-01-10T12:19:29.791137Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:29.798725Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:29.836159Z INFO: TTestServer started on Port 21476 GrpcPort 3844 PQClient connected to localhost:3844 2026-01-10T12:19:30.097126Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:30.127358Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:30.264128Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:30.487837Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:30.501624Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:33.792288Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593706507461553230:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:33.792407Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:33.792650Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593706507461553238:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:33.792728Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593706507461553244:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:33.793175Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:33.797497Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:19:33.819497Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593706507461553246:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:19:33.852170Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.922403Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593706507461553483:3150] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:19:33.927189Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.958793Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7593706507461553512:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:19:33.959317Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NjkxYzhlZWItNDE3MzI3ZTItZDZkYmQyNWMtNWZlZDNlZTc=, ActorId: [11:7593706507461553228:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxhyrz61de6e72j9qt5j8t, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:19:33.959851Z node 11 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:19:34.067160Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.074013Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [12:7593706510922663805:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:19:34.074502Z node 12 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=12&id=ZGJkNzQwNjAtOTJjZDI0ODItNWM0ZjgxYWMtNzE0NzFlOGI=, ActorId: [12:7593706510922663779:2309], ActorState: ExecuteState, LegacyTraceId: 01kekxhz0f83kpvvcgxtht1067, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:19:34.074999Z node 12 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [11:7593706511756521080:3376] 2026-01-10T12:19:34.480387Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593706490281682664:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.480520Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:34.495183Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593706489447826937:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.495267Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2026-01-10T12:19:41.247371Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7593706541821292524:3600] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-10T12:19:41.247415Z node 11 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [11:7593706541821292524:3600] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> Cdc::AwsRegion [GOOD] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2026-01-10T12:19:37.108841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706524649077734:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:37.109331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:37.292279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.299391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:37.299491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:37.353640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:37.362669Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706524649077705:2081] 1768047577107409 != 1768047577107412 2026-01-10T12:19:37.369815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:37.420644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:37.420674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:37.420686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:37.420781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:37.561042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.569236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:37.648765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.116471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:40.603274Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706534693469546:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:40.603350Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:40.621270Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.703520Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:40.705795Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706534693469519:2081] 1768047580602273 != 1768047580602276 2026-01-10T12:19:40.719632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:40.719684Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:40.726979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:40.788934Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:40.788960Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:40.788967Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:40.789033Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:40.851833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.997928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:41.002141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:41.012974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TFlatTest::WriteMergeAndRead >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] >> TFlatTest::SelectRangeNullArgs3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2026-01-10T12:19:36.599739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706517629421269:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:36.599895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:36.844910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:36.845032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:36.896679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:36.903469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.919850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:36.921945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706517629421245:2081] 1768047576598906 != 1768047576598909 2026-01-10T12:19:37.079258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:37.155187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.198349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.284782Z node 1 :TX_DATASHARD 
DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:37.286304Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:19:37.286366Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:37.404452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:37.404740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-10T12:19:37.405044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:37.405078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:37.405317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:37.405347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710668:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-10T12:19:37.405606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-10T12:19:37.405651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:37.406454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:19:37.406591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: 
/dc-1/Dir/TableOld 2026-01-10T12:19:37.406723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2026-01-10T12:19:37.406760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-10T12:19:37.407133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:37.407300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:37.407623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2026-01-10T12:19:37.407699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-10T12:19:37.407772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2026-01-10T12:19:37.407799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-10T12:19:37.407811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-10T12:19:37.407892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2026-01-10T12:19:37.407904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2026-01-10T12:19:37.407918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2026-01-10T12:19:37.409216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-10T12:19:37.409248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: 
TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-10T12:19:37.409389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-10T12:19:37.409415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:19:37.409494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-10T12:19:37.409734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-10T12:19:37.409752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-10T12:19:37.409827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-10T12:19:37.409858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:19:37.409888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-10T12:19:37.409927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change sta ... 
ate changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:41.396302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706536023859848 RawX2: 4503612512274667 } TabletId: 72075186224037888 State: 4 2026-01-10T12:19:41.396327Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:41.396607Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:41.396682Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:19:41.398100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:41.398131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:41.397847Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:41.399821Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7593706540318827615:2304], serverId# [3:7593706540318827618:2489], sessionId# [0:0:0] 2026-01-10T12:19:41.399838Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7593706540318827614:2303], serverId# [3:7593706540318827617:2488], sessionId# [0:0:0] 2026-01-10T12:19:41.400444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:41.400481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:41.400561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:41.400573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:41.401013Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-10T12:19:41.403085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706536023860073 RawX2: 4503612512274672 } TabletId: 72075186224037890 State: 4 2026-01-10T12:19:41.403128Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:41.401676Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-10T12:19:41.402162Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:41.402610Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state 
Offline to schemeshard 72057594046644480 2026-01-10T12:19:41.403303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:41.403529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:41.403730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:41.403819Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:41.404141Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:19:41.404440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706536023860074 RawX2: 4503612512274673 } TabletId: 72075186224037892 State: 4 2026-01-10T12:19:41.404484Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:41.404699Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:19:41.404722Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:19:41.403536Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:19:41.403935Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:41.405371Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:41.405448Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:19:41.409178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:41.409212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:41.409265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:41.409277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-10T12:19:41.409297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:41.409327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:41.409337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 
72057594046644480 2026-01-10T12:19:41.409994Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:19:41.410025Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-10T12:19:41.411904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:41.412183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:41.412416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:19:41.412459Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:41.412572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:41.412762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:41.412793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:41.412858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:41.412515Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:19:41.412969Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:41.413484Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:19:41.413522Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-10T12:19:41.413552Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-10T12:19:41.413569Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-01-10T12:19:41.413588Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-10T12:19:41.413724Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:19:41.414668Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-01-10T12:19:41.415196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:41.415215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:41.415849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:19:41.415876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:19:41.415932Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:41.413930Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-10T12:19:41.414261Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:19:41.415460Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:41.415529Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:19:41.417246Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:19:41.417298Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2026-01-10T12:19:37.455079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706522764270841:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:37.455149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:37.487989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:37.760637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.779034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:37.779117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:37.797962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:37.853536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:37.854659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706522764270812:2081] 1768047577453839 != 1768047577453842 2026-01-10T12:19:37.933682Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.020096Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:38.188188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:38.188397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2026-01-10T12:19:38.188573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-10T12:19:38.188617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-10T12:19:38.188704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715658:0 type: TxMkDir target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-10T12:19:38.188752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:38.188954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:19:38.189011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:38.191050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 38, at schemeshard: 72057594046644480 2026-01-10T12:19:38.191253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2026-01-10T12:19:38.191484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:19:38.191587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:19:38.191753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:19:38.191851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2026-01-10T12:19:38.191866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706522764271498:2390], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2026-01-10T12:19:38.191888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706522764271498:2390], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 38 2026-01-10T12:19:38.191926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2026-01-10T12:19:38.191955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:19:38.191995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2026-01-10T12:19:38.206503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:19:38.208188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.208332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.208356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2026-01-10T12:19:38.208381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2026-01-10T12:19:38.208410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-10T12:19:38.208628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.208701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.208709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2026-01-10T12:19:38.208718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 2 2026-01-10T12:19:38.208728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:38.208783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2026-01-10T12:19:38.208883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:19:38.208896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2026-01-10T12:19:38.208911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:19:38.209753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2026-01-10T12:19:38.209833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:19:38.210086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.210232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2026-01-10T12:19:38.211403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047578255, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:38.211497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768047578255 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:19:38.211530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1768047578255, at schemeshard: 72057594046644480 2026-01-10T12:19:38.211646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715658:0 128 -> 240 2026- ... 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715661, publications: 2, subscribers: 1 2026-01-10T12:19:38.538755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 1], 12 2026-01-10T12:19:38.538763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715661, [OwnerId: 72057594046644480, LocalPathId: 40], 3 2026-01-10T12:19:38.538899Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{222, redo 163b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-10T12:19:38.538923Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:19:38.539265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.539317Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-10T12:19:38.539335Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:19:38.539376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.539403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2026-01-10T12:19:38.539417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2026-01-10T12:19:38.539429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 5 2026-01-10T12:19:38.539554Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{223, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-10T12:19:38.539579Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:19:38.539675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.539718Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type 
NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-10T12:19:38.539730Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:19:38.539778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.539786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2026-01-10T12:19:38.539795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 3 2026-01-10T12:19:38.539832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:38.539871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2026-01-10T12:19:38.539880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7593706527059239470:2291] 2026-01-10T12:19:38.539917Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{224, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-10T12:19:38.539942Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:19:38.541793Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:19:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.541810Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.541818Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:19:1:24576:108:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.541832Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:134:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.541946Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2026-01-10T12:19:38.541965Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.541981Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2026-01-10T12:19:38.542032Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2026-01-10T12:19:38.542035Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.542043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.542053Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:121:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:19:38.542074Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:20} commited cookie 1 for step 19 2026-01-10T12:19:38.542098Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2026-01-10T12:19:38.542115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2026-01-10T12:19:38.542211Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:406: TClient[72057594046644480] received poison pill [1:7593706527059239471:2291] 2026-01-10T12:19:38.542234Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594046644480] notify reset [1:7593706527059239471:2291] 2026-01-10T12:19:38.542255Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7593706527059239471:2291] 2026-01-10T12:19:40.817124Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706536679751003:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:40.817830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:40.846777Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.915983Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:40.917651Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706536679750980:2081] 1768047580816464 != 1768047580816467 2026-01-10T12:19:40.953433Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:40.953513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:40.957074Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:41.120925Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.143166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:41.159634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:19:41.165556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:41.827769Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:44.176748Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7593706553859621469:2799] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000085 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2026-01-10T12:19:44.176832Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7593706553859621469:2799] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:15:32.730689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:15:32.874034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:15:32.883144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:15:32.883683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:15:32.883814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:15:33.323473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:15:33.323599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:15:33.337360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047329023959 != 1768047329023962 2026-01-10T12:15:33.355114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:15:33.404400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:15:33.544113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:15:33.939534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:15:33.955563Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:15:34.073500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:15:34.116367Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:15:34.116680Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:15:34.179793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:15:34.179954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:15:34.182139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:15:34.182252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:15:34.182337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:15:34.182738Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:15:34.182894Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:15:34.182984Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:15:34.194377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:15:34.252005Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:15:34.252429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:15:34.252550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:15:34.252601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.252668Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:15:34.252712Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.253541Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:15:34.253673Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:15:34.253808Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.253852Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.253897Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:15:34.254078Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.254168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:15:34.254337Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:15:34.254633Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:15:34.254797Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:15:34.257086Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.268651Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:15:34.268755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:15:34.394101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:15:34.399312Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:15:34.399398Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.400749Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.400825Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:15:34.400890Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:15:34.401210Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:15:34.401375Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:15:34.401581Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:15:34.401652Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:15:34.403531Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:15:34.404131Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:15:34.405604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:15:34.405648Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.406435Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:15:34.406531Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.407675Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:15:34.407726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:15:34.407778Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:15:34.407834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:15:34.407877Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:15:34.407938Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:15:34.413078Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:903:2775][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2026-01-10T12:15:34.424070Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:15:34.425924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:15:34.426126Z node 1 :TX_ ... tion][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.039722Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.084534Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.084631Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.084672Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.084714Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.084750Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.084836Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.084867Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.084897Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.084929Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.084959Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.116540Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.116629Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.116668Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.116710Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.116746Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.116837Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.116869Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.116902Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.116936Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.116967Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.138006Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.138098Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.138160Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.138211Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.138249Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.138351Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.138383Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.138415Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.138447Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.138478Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.159361Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.159434Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.159464Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.159495Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.159531Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.159599Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.159622Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.159644Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.159668Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.159688Z node 24 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.183486Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.183574Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.183613Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.183656Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.183693Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-10T12:19:44.183907Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:19:44.183942Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.183979Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:19:44.184013Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:19:44.184042Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2026-01-10T12:19:44.208170Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:44.208257Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2026-01-10T12:19:44.208484Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-10T12:19:44.209065Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 427 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:19:44.209186Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-10T12:19:44.209377Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 427 accessed 1 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-10T12:19:44.209519Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-10T12:19:44.209750Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:44.209952Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:19:44.210265Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 407 from pos 0 cbcount 1 2026-01-10T12:19:44.211051Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2026-01-10T12:19:44.213066Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-10T12:19:44.213196Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2026-01-10T12:19:44.214258Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037891][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-10T12:19:44.214822Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037891][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 426 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:19:44.214947Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037891][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-10T12:19:44.215106Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 426 accessed 0 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-10T12:19:44.215309Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037891][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-10T12:19:44.215627Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:19:44.215774Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:19:44.216069Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 406 from pos 0 cbcount 1 2026-01-10T12:19:44.216966Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TFlatTest::ShardFreezeRejectBadProtobuf >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TFlatTest::LargeProxyReply |91.8%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTest::RejectByPerShardReadSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2026-01-10T12:19:32.566594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706500421827056:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:32.566695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:32.803121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:32.803251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:32.816844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:32.833013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:32.834152Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706500421827026:2081] 1768047572565134 != 1768047572565137 2026-01-10T12:19:32.871918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:33.023359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:33.064705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:33.084552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.204695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.248740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.572835Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:37.567199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706500421827056:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:37.567307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:38.207308Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706526571584059:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.207346Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.217143Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:38.293579Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706526571584032:2081] 1768047578206647 != 1768047578206650 2026-01-10T12:19:38.300857Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:38.306484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:38.306558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:38.314753Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:38.316047Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.520389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:38.541704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.606824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.650211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.212492Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:41.988625Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706539441879321:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:41.990057Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:42.007447Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.081090Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:42.117041Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:42.117126Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:42.123343Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.312163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.312567Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.321989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.336065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.402997Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.463442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.994709Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> TFlatTest::CopyTableAndDropOriginal [GOOD] >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> TLocksTest::UpdateLockedKey >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TFlatTest::Mix_DML_DDL >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TLocksTest::SetLockFail >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2026-01-10T12:19:41.657466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706541799791223:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:41.657519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:41.898515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:41.898628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:41.937440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.964106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:41.966607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:41.968851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706541799791194:2081] 1768047581655991 != 1768047581655994 2026-01-10T12:19:42.211821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.220549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.225803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.237288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:19:42.345224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.499911Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:42.509389Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:42.539407Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:42.549087Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-10T12:19:42.665982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:42.719907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:42.720239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:42.720794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:42.720837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-10T12:19:42.720850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:42.720873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:42.720901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:42.720915Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:42.721408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-10T12:19:42.721545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:42.722273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:42.722329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-10T12:19:42.722900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-10T12:19:42.723091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-10T12:19:42.723260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:19:42.723295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:19:42.723398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-10T12:19:42.723454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:19:42.723482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706541799791726:2250], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-01-10T12:19:42.723513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706541799791726:2250], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-01-10T12:19:42.723554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:42.723586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-10T12:19:42.723938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 
PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:42.724063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:42.727328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:42.727437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:42.727450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-01-10T12:19:42.727467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-10T12:19:42.727494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-10T12:19:42.727727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:42.727849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:42.727857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to ... 
:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:45.672220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706557696424851 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2026-01-10T12:19:45.672256Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:45.672382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706557696425158 RawX2: 4503608217307433 } TabletId: 72075186224037891 State: 4 2026-01-10T12:19:45.672409Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:45.672529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706557696425158 RawX2: 4503608217307433 } TabletId: 72075186224037891 State: 4 2026-01-10T12:19:45.672548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:45.672681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:45.672694Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-10T12:19:45.672747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:45.672820Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:19:45.672821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:45.672835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:45.672872Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-10T12:19:45.672878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:45.672889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:45.672925Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:45.672928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 
72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:45.672939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:45.672975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:45.672985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:45.672986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:45.674287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:45.674330Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:19:45.674358Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-10T12:19:45.674529Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:19:45.674552Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:19:45.674556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:45.674768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:45.674952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:45.675058Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:19:45.675067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:45.675181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:45.675187Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:19:45.675285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:45.675398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:45.675491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 
72057594046644480 2026-01-10T12:19:45.675638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:45.675662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:45.675712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:45.675737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:45.675759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:45.676160Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:19:45.676184Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-10T12:19:45.676201Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:19:45.676217Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:19:45.676915Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:45.676981Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:19:45.677244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:45.677273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-10T12:19:45.677304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:45.677319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:45.677551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:19:45.677568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:45.677591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:45.677607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:45.677626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:45.677661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths 
Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:45.678329Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:45.678384Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:19:45.679662Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:45.679714Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:19:45.971256Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-10T12:19:45.971646Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-10T12:19:45.972013Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-10T12:19:45.972364Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] >> TFlatTest::GetTabletCounters [GOOD] >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> TObjectStorageListingTest::TestFilter >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2026-01-10T12:19:36.523760Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706517333236038:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:36.523844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:36.719283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.740949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:36.741051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:36.766662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:36.825132Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:36.826143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706517333236010:2081] 1768047576522294 != 1768047576522297 2026-01-10T12:19:36.961967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:36.991618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.008459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.150522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.208175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.267736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.328723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.394431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.459932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.520592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.536568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:37.590911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.650627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.714106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.771636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.832480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.890277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.953185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.016765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.085934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.159473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.232263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.301602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.378029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.451788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.524511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.595100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.658911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.739199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... 
thId [OwnerId: 72057594046644480, LocalPathId: 42] was 4 2026-01-10T12:19:46.985488Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037895 state Ready 2026-01-10T12:19:46.985536Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2026-01-10T12:19:46.985675Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037894 state Ready 2026-01-10T12:19:46.985700Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-10T12:19:46.993571Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:7593706563910963102:3209], serverId# [2:7593706563910963103:3210], sessionId# [0:0:0] 2026-01-10T12:19:46.993710Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:46.995346Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:46.995412Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.003993Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037895, clientId# [2:7593706568205930408:3216], serverId# [2:7593706568205930409:3217], sessionId# [0:0:0] 2026-01-10T12:19:47.004183Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.005827Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.005887Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.009681Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.011095Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.011157Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.014555Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.016565Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.016633Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.020418Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.022151Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.022211Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.025352Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:47.028541Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:47.028594Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:47.028957Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.030290Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.030348Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.031422Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:47.031952Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:47.031985Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:47.034181Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.035618Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.035668Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.039503Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.041038Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.041109Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.044820Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.046210Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.046278Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.050242Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.051639Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.051702Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.059329Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.060992Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.061060Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.065033Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.066245Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:47.066794Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:47.066821Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:47.067312Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.067375Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.069218Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:47.071500Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:47.071527Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:47.077875Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.084487Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.084614Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.088543Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.090853Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-10T12:19:47.090922Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-10T12:19:47.094533Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-10T12:19:47.096388Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-10T12:19:47.096442Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-10T12:19:47.099456Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-10T12:19:47.101946Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 
2026-01-10T12:19:47.101999Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2026-01-10T12:19:47.102904Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-01-10T12:19:47.103255Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-01-10T12:19:47.103542Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-10T12:19:47.103798Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-10T12:19:47.104053Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-10T12:19:47.104369Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 1:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:93:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:92:2120] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:208:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:88:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:91:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:93:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:92:2120] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:208:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:89:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:92:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:94:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:93:2120] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:209:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:92:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:95:2057] recipient: [31:94:2122] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:97:2057] recipient: [31:94:2122] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:96:2123] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:212:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:92:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:95:2057] recipient: [32:94:2122] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:97:2057] recipient: [32:94:2122] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:96:2123] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:212:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:78:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:81:2057] recipient: [35:80:2112] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:83:2057] recipient: [35:80:2112] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! 
new actor is[35:82:2113] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:198:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:79:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:82:2057] recipient: [37:81:2112] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:84:2057] recipient: [37:81:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:83:2113] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:199:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:81:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:84:2057] recipient: [38:83:2114] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:86:2057] recipient: [38:83:2114] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:85:2115] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:201:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:84:2057] recipient: [39:83:2114] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:86:2057] recipient: [39:83:2114] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:85:2115] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:201:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2114] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:87:2057] recipient: [40:84:2114] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2115] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2026-01-10T12:19:43.602120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706550683289624:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:43.603291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:43.839990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:43.840061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:43.891527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:43.899002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:43.952169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706550683289596:2081] 1768047583600396 != 1768047583600399 2026-01-10T12:19:43.953001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:44.132890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:44.141886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:44.191155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.245270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.610538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; insert finished 8203 usec 8307 usec 8223 usec 8407 usec 8083 usec 7360 usec 7290 usec 10420 usec 8908 usec 8723 usec 2026-01-10T12:19:46.825223Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706561112552388:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:46.825363Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:46.839703Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:46.916731Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:46.932250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:46.932339Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:46.963525Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:47.057401Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.145186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.171247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:47.831536Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2026-01-10T12:19:43.853715Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706548031032523:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:43.853997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:44.097975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.106239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:44.106348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:44.155261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:44.231396Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:44.232118Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706548031032405:2081] 1768047583849393 != 1768047583849396 2026-01-10T12:19:44.369589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.410012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:44.490209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:47.256022Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706565236615425:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.256102Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:47.274349Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:47.371420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:47.371491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:47.383466Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706565236615400:2081] 1768047587253925 != 1768047587253928 2026-01-10T12:19:47.396390Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:47.407498Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:47.412975Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-10T12:19:47.618325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.634474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::SelectRangeBytesLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... r refreshed! new actor is[31:83:2113] Leader for TabletID 72057594037927937 is [31:83:2113] sender: [31:199:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:85:2057] recipient: [32:84:2115] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:87:2057] recipient: [32:84:2115] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:86:2116] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:202:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:83:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:86:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:88:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:87:2116] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:105:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:85:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:88:2057] recipient: [36:87:2117] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:90:2057] recipient: [36:87:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:89:2118] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:205:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2117] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:91:2057] recipient: [37:88:2117] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:90:2118] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:108:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:88:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:91:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:93:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:92:2120] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:208:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:88:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:91:2057] recipient: [39:90:2119] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:93:2057] recipient: [39:90:2119] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:92:2120] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:208:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:89:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:92:2057] recipient: [40:91:2119] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:94:2057] recipient: [40:91:2119] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:93:2120] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:209:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:92:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:95:2057] recipient: [41:94:2122] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:97:2057] recipient: [41:94:2122] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:96:2123] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:212:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:92:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:95:2057] recipient: [42:94:2122] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:97:2057] recipient: [42:94:2122] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:96:2123] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:212:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
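The reboot traces above all follow the same shape: the harness announces "!Reboot <tablet> (actor [...]) on event <EventType> !", the tablet comes back ("rebooted!", "tablet resolver refreshed!"), and the new leader actor id is printed ("new actor is[...]"). A minimal sketch of how such traces could be summarized offline, assuming the log has been saved to a file such as ya_log.txt (the helper names and the path are illustrative, not part of ya or the YDB test harness); the regexes only match strings that appear verbatim in the trace:

# Hypothetical post-processing helper: count injected reboots per event type
# and list the leader actor ids that took over after each reboot.
import re
from collections import Counter

REBOOT_ON_EVENT = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) on event (\S+) !")
NEW_ACTOR = re.compile(r"new actor is\[([^\]]+)\]")

def summarize_reboots(log_text: str) -> Counter:
    """Count reboot injections per event type, e.g. NKikimr::TEvKeyValue::TEvRead."""
    return Counter(event for _tablet, event in REBOOT_ON_EVENT.findall(log_text))

def leader_generations(log_text: str) -> list[str]:
    """Actor ids that became leader after each injected reboot, in log order."""
    return NEW_ACTOR.findall(log_text)

if __name__ == "__main__":
    with open("ya_log.txt", encoding="utf-8") as f:  # path is illustrative
        text = f.read()
    for event, count in summarize_reboots(text).most_common():
        print(f"{count:4d}  {event}")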
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:93:2057] recipient: [26:92:2121] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:95:2057] recipient: [26:92:2121] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:94:2122] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:210:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:91:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:94:2057] recipient: [27:93:2121] Leader for TabletID 72057594037927937 is [27:95:2122] sender: [27:96:2057] recipient: [27:93:2121] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! new actor is[27:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:52:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:78:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:81:2057] recipient: [30:80:2112] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:83:2057] recipient: [30:80:2112] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:82:2113] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:198:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:78:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:81:2057] recipient: [31:80:2112] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:83:2057] recipient: [31:80:2112] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:82:2113] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:198:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:79:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:84:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:83:2113] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:199:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:82:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:85:2057] recipient: [34:84:2115] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:87:2057] recipient: [34:84:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:86:2116] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:202:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:83:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:86:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:88:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:87:2116] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:203:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2118] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:91:2057] recipient: [36:88:2118] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:90:2119] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2118] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:91:2057] recipient: [37:88:2118] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2119] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:206:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:87:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:90:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:92:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:91:2119] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:207:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2026-01-10T12:19:33.480961Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706505934598122:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:33.481054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:33.696895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:33.697008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:33.734550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:33.777353Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:33.777560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:33.782311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706505934598099:2081] 1768047573480063 != 1768047573480066 2026-01-10T12:19:33.933729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:33.965998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.004603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.489208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:38.481048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706505934598122:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.481118Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:41.358020Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-01-10T12:19:41.375154Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [1768047580866:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-01-10T12:19:41.382764Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7593706535999375830:6125] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2026-01-10T12:19:41.382851Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593706535999375830:6125] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2026-01-10T12:19:42.028369Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706543694010643:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:42.029487Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:42.052104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.139783Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:42.148305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:42.148390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:42.177954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.295531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.372648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.382062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.396472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:43.034775Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:47.028892Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706543694010643:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.028953Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:51.195630Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-10T12:19:51.209246Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-10T12:19:51.211183Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-10T12:19:51.212114Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7593706578053755598:6108] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::SelectRangeNullArgs4 [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2026-01-10T12:19:41.585386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706542458022697:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:41.586171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:41.817979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.850309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:41.850434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:41.902202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:41.936029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:41.940272Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706542458022665:2081] 1768047581584696 != 1768047581584699 2026-01-10T12:19:42.095855Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.138256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.145592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.270401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.449683Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:42.456395Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} 
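Both REPLY_SIZE_EXCEEDED failures above report the same limit, 50331648 bytes, which is 48 MiB; the rejected replies were roughly 58.6 MiB and 68.3 MiB. A quick arithmetic check (the sizes are copied from the log; reading the limit as 48 MiB is plain arithmetic, not a claim about where that limit is configured):

# Values taken verbatim from the REPLY_SIZE_EXCEEDED messages above.
LIMIT = 50331648             # == 48 * 1024 * 1024, i.e. 48 MiB
replies = [61442990, 71580986]
for size in replies:
    print(f"{size} bytes = {size / 2**20:.1f} MiB, over the limit by "
          f"{size - LIMIT} bytes ({(size - LIMIT) / 2**20:.1f} MiB)")
# 61442990 bytes = 58.6 MiB, over the limit by 11111342 bytes (10.6 MiB)
# 71580986 bytes = 68.3 MiB, over the limit by 21249338 bytes (20.3 MiB)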
Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:42.486234Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:42.496351Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-10T12:19:42.602669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:42.656497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:42.656847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:42.657340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:42.657405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-10T12:19:42.657430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:42.657453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:42.657487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:42.657507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:42.657653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-10T12:19:42.657758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:42.658434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:42.658475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-10T12:19:42.659076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-10T12:19:42.659294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-10T12:19:42.659486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:19:42.659508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:19:42.659653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-10T12:19:42.659747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:19:42.659765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706542458023183:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 38 2026-01-10T12:19:42.659784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706542458023183:2239], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 40 2026-01-10T12:19:42.659821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:42.659855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-10T12:19:42.660192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:42.660319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:42.662562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 
72057594046644480, cookie: 281474976715676 2026-01-10T12:19:42.662646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-10T12:19:42.662657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-01-10T12:19:42.662690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-10T12:19:42.662708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-10T12:19:42.662887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-10T12:19:42.662936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-10T12:19:42.662942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-01-10T12:19:42.662950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... 
6-01-10T12:19:49.219925Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:19:49.219928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:49.219941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:49.219974Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:49.219975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:49.219986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:49.220018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:49.220021Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:49.220035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:49.220713Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:49.220867Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:19:49.220913Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:19:49.221013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:49.221194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:49.221370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:49.221541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:49.221569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:49.221620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-10T12:19:49.221680Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:49.221764Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 
72075186224037888 2026-01-10T12:19:49.222049Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:19:49.222081Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-01-10T12:19:49.222244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:49.222269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-10T12:19:49.222327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:19:49.222474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 41] was 1 2026-01-10T12:19:49.222593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:49.222755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:49.222866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:49.223000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:49.223109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:49.223253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:19:49.223273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:49.223426Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:19:49.223495Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:19:49.224466Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:49.224550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:49.224577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 41], at schemeshard: 72057594046644480 2026-01-10T12:19:49.224634Z node 3 
:TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-10T12:19:49.224644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:49.224663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:49.224667Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:19:49.224680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:49.224682Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-10T12:19:49.225340Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:19:49.225391Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-10T12:19:49.225944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:19:49.225965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:19:49.225991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:49.225999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:49.226017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:49.226029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:49.226050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:49.226779Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-01-10T12:19:49.226804Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:49.226808Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-10T12:19:49.226823Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-10T12:19:49.226876Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:19:49.227674Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:49.228261Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:49.228306Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:19:49.510121Z 
node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-01-10T12:19:49.510642Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-01-10T12:19:49.511283Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-10T12:19:49.511843Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-10T12:19:49.512357Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-10T12:19:49.513168Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) 2026-01-10T12:19:49.516529Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> TObjectStorageListingTest::MaxKeysAndSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2026-01-10T12:19:49.595211Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706574816456726:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:49.595347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:49.631804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:49.884396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:49.884748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:49.902955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:49.935332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:49.985426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:49.989915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706574816456669:2081] 1768047589588256 != 1768047589588259 2026-01-10T12:19:50.212573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.215658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:50.266153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.401734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2026-01-10T12:19:50.424561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:19:50.444694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:19:50.464838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2026-01-10T12:19:45.961305Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706556734347096:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:45.961422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:46.212654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:46.219992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:46.220123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:46.277204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:46.315451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:46.325373Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706556734346991:2081] 1768047585956137 != 1768047585956140 2026-01-10T12:19:46.417705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:46.512778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:46.595352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:46.760173Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:46.763722Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:46.784780Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:46.789188Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} 
Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:46.810458Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.811707Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:19:46.811764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:46.813803Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-10T12:19:46.814306Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:46.814328Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:46.814503Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.816140Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:19:46.816222Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:46.818723Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.820049Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:19:46.820112Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:19:46.822330Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2026-01-10T12:19:46.822734Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:19:46.822754Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-01-10T12:19:46.822891Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.824817Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:19:46.824874Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:19:46.831271Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.833322Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.833463Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.834956Z node 1 :TX_DATASHARD DEBUG: 
datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.835100Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.835781Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.836739Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.836856Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.837345Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.838201Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.838316Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.838889Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.839622Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.839839Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.840349Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.841113Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.841217Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.841682Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.842438Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.842529Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.843014Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.843783Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.843886Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.844694Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.845486Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.845574Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.846015Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.847001Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.847105Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.847693Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.848580Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.848695Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.849200Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.850001Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.850110Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:46.850896Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-10T12:19:46.851785Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-10T12:19:46.851909Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:46.852378Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-10T12:19:46.853207Z node 1 :TX_DATASHA ... 72057594046644480 2026-01-10T12:19:50.172881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.172889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.172922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.172930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.172959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.172969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.173002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.173011Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:50.173016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.173022Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:19:50.173077Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 
2026-01-10T12:19:50.173127Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-10T12:19:50.173135Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-10T12:19:50.173492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.173499Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-10T12:19:50.173512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.173728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706576099868205 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2026-01-10T12:19:50.173761Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:50.174479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:50.174499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:50.174508Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-10T12:19:50.175481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:50.175678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-10T12:19:50.175854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:50.175979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:50.176090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:50.176240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:50.176649Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:19:50.176673Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-10T12:19:50.176688Z node 2 
:TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:19:50.176700Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-10T12:19:50.176834Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:19:50.177431Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:50.177516Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:19:50.178616Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:19:50.178645Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:19:50.178657Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-10T12:19:50.178668Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-10T12:19:50.178678Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:19:50.179178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:50.179192Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:50.179246Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:19:50.179290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:50.179371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:19:50.179447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:50.179500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:50.179605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:50.179713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:50.179724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-10T12:19:50.179767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 
2026-01-10T12:19:50.179776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:50.179811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:50.179922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:50.179934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-10T12:19:50.180641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:19:50.180650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:50.180667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:50.180672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:50.180681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:19:50.180685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:19:50.180694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:19:50.180702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:50.180732Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:19:50.180789Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:19:50.181157Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:50.181951Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:19:50.182002Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:19:50.183362Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:19:50.183463Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SplitInvalidPath >> TLocksTest::BrokenLockErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2026-01-10T12:19:46.617022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706561258698716:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:46.617093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:46.833875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:46.833943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:46.845637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:46.937674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:46.941192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:46.946106Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706561258698687:2081] 1768047586615232 != 1768047586615235 2026-01-10T12:19:47.141240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.147676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:47.153560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.213919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.073990Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706580789324194:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:50.074208Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:50.102189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.175175Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:50.176795Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706580789324157:2081] 1768047590072611 != 1768047590072614 2026-01-10T12:19:50.211412Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:19:50.211492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:50.216847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:50.278082Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.428127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:50.449280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] >> TLocksTest::GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2026-01-10T12:19:47.190329Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706565083334653:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.190652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:47.431998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:47.432139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:47.432280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.472720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:47.536666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:47.537623Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706565083334546:2081] 1768047587184684 != 1768047587184687 2026-01-10T12:19:47.651988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.749181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.798964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Error 128: Mix freeze cmd with other options is forbidden 2026-01-10T12:19:47.908653Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706565083335458:2565] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } 2026-01-10T12:19:47.913434Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706565083335471:2571] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2026-01-10T12:19:47.915448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706565083335477:2576] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-10T12:19:47.917494Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706565083335483:2581] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-10T12:19:50.643713Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706578315660463:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:50.643860Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:50.737314Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.742814Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:50.778219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:50.778325Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:50.781008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:50.969574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:51.001764Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.013864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Split >> TFlatTest::ShardUnfreezeNonFrozen >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:122:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [1:125:2150] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:134:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-01-10T12:17:19.731545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:17:19.731629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.731702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:17:19.731750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:17:19.731788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:17:19.731817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:17:19.731883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:17:19.731967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:17:19.732873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:17:19.733150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:17:19.836477Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-10T12:17:19.836530Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:17:19.837187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:138:2160] sender: [1:188:2058] recipient: [1:15:2062] 2026-01-10T12:17:19.845317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:17:19.845756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:17:19.845891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:17:19.850913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:17:19.851076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:17:19.851675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:17:19.852251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:17:19.854406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.854573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:17:19.855623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:17:19.855668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:17:19.855827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:17:19.855869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:17:19.855912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:17:19.855988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:17:19.972372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:17:19.973771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
in: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1644 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-10T12:19:53.666411Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-10T12:19:53.666547Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1644 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-10T12:19:53.666651Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1644 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-10T12:19:53.667246Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 845 RawX2: 420906797822 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:19:53.667284Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-10T12:19:53.667390Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 845 RawX2: 420906797822 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:19:53.667437Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:19:53.667516Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 845 RawX2: 420906797822 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-10T12:19:53.667580Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:53.667619Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:19:53.667660Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 
1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:19:53.667701Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:19:53.667729Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-10T12:19:53.670501Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:19:53.670633Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:19:53.670929Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:19:53.670982Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2026-01-10T12:19:53.671030Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-10T12:19:53.671067Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2026-01-10T12:19:53.671136Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-10T12:19:53.671176Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 240 -> 240 2026-01-10T12:19:53.675263Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-10T12:19:53.675316Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-01-10T12:19:53.675434Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:19:53.675475Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:19:53.675524Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-10T12:19:53.675562Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:19:53.675599Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-10T12:19:53.675642Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-10T12:19:53.675682Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-10T12:19:53.675711Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 
2026-01-10T12:19:53.675849Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:19:53.675893Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-10T12:19:53.678772Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-10T12:19:53.678816Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-10T12:19:53.679243Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-10T12:19:53.679356Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-10T12:19:53.679399Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [98:941:2901] TestWaitNotification: OK eventTxId 1003 2026-01-10T12:19:53.680022Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:19:53.680317Z node 98 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 348us result status StatusSuccess 2026-01-10T12:19:53.680992Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestConcatToLongKey [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] >> TFlatTest::SelectRangeReverse >> TLocksTest::Range_Pinhole >> TLocksTest::CK_GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2026-01-10T12:19:39.815635Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706531609813572:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:39.815941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:40.025699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.072381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:40.072492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:40.111273Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706531609813469:2081] 1768047579812147 != 1768047579812150 2026-01-10T12:19:40.116951Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:40.119234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:40.298730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.300867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:40.363780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.488322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.535901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.820258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:44.074256Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706552312997440:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:44.076056Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:44.104640Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.173975Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:44.176257Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706552312997413:2081] 1768047584072939 != 1768047584072942 2026-01-10T12:19:44.213361Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:44.213444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:44.217611Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:44.401067Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.424848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:44.443218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.502509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.551435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:45.086333Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:48.211701Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706568914928719:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:48.212336Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:48.221601Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.282294Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:48.319166Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:48.319248Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:48.329469Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:48.449978Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:48.456596Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.465384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.507467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.546143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.883073Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706581162218437:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:51.883163Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:51.953616Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.958667Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:51.967878Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706581162218407:2081] 1768047591882241 != 1768047591882244 2026-01-10T12:19:51.991940Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:51.992020Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:51.996965Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:52.165653Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:52.183807Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:52.216725Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:52.233403Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:52.276649Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:54:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:78:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:81:2057] recipient: [37:80:2112] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:83:2057] recipient: [37:80:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:82:2113] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:198:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:79:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:84:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:83:2113] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:199:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:82:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:85:2057] recipient: [39:84:2115] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:87:2057] recipient: [39:84:2115] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:86:2116] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:202:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2115] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:87:2057] recipient: [40:84:2115] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:86:2116] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:83:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:86:2057] recipient: [41:85:2115] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:88:2057] recipient: [41:85:2115] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:87:2116] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:203:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:86:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:89:2057] recipient: [42:88:2118] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:91:2057] recipient: [42:88:2118] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:90:2119] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:206:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:86:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:89:2057] recipient: [43:88:2118] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:91:2057] recipient: [43:88:2118] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:90:2119] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:206:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:87:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:90:2057] recipient: [44:89:2118] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:92:2057] recipient: [44:89:2118] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:91:2119] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:207:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] >> TLocksTest::Range_BrokenLockMax >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2026-01-10T12:19:49.699691Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706573316082386:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:49.699744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:49.953317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:49.953424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:50.001637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.024229Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706573316082358:2081] 1768047589698297 != 1768047589698300 2026-01-10T12:19:50.027134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:50.031539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:50.236293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:50.238303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.241683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:50.322177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.481627Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:50.486650Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:50.511939Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:50.517379Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-10T12:19:50.673934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:19:50.674382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:50.674900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:50.674947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-10T12:19:50.674968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:50.674985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:19:50.675017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:50.675032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:50.675166Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-10T12:19:50.675297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:19:50.676148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:50.676183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-10T12:19:50.676791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-10T12:19:50.677002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-10T12:19:50.677193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:19:50.677215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:19:50.677343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-10T12:19:50.677426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:19:50.677452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706573316082880:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-01-10T12:19:50.677473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7593706573316082880:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-01-10T12:19:50.677506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-10T12:19:50.677534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-10T12:19:50.677872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 
72057594046644480 PathId: 1 } 2026-01-10T12:19:50.678021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-10T12:19:50.679857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:50.679942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:50.679952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-01-10T12:19:50.679968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-10T12:19:50.679980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-10T12:19:50.680318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-10T12:19:50.680373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Gen ... 
id: 281474976715784:0 2026-01-10T12:19:54.364136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715784:0 2026-01-10T12:19:54.364162Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2026-01-10T12:19:54.364190Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-10T12:19:54.364218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-10T12:19:54.364310Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:54.364424Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2026-01-10T12:19:54.364461Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:19:54.364516Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-10T12:19:54.366482Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:54.366577Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7593706597778474745:2678], serverId# [2:7593706597778474750:3626], sessionId# [0:0:0] 2026-01-10T12:19:54.366660Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:19:54.366714Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-10T12:19:54.366757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706593483505751 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2026-01-10T12:19:54.366815Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:54.367176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:54.367218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037888 was deleted 2026-01-10T12:19:54.368011Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-10T12:19:54.368580Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-10T12:19:54.368838Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:54.368849Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:54.368925Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop Check that tablet 72075186224037889 was deleted 2026-01-10T12:19:54.369108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:54.369337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:54.369363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:54.369421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:19:54.369565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706593483506036 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2026-01-10T12:19:54.369593Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:19:54.369600Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:54.369615Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:19:54.369705Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:19:54.370393Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2026-01-10T12:19:54.370437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:19:54.370467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:54.370522Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:54.370616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:54.370647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:54.371012Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:19:54.371138Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706593483506035 RawX2: 4503608217307430 } TabletId: 72075186224037890 State: 4 2026-01-10T12:19:54.371178Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:54.371684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:54.371707Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:19:54.371720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:54.372412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:19:54.372584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:19:54.372716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:19:54.372873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:19:54.372910Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-10T12:19:54.372933Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:19:54.372959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:54.372991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:19:54.373026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:54.373674Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:19:54.373775Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 Check that tablet 72075186224037890 was deleted 2026-01-10T12:19:54.374307Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-10T12:19:54.374334Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:19:54.374662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 
2026-01-10T12:19:54.374681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:19:54.374708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:19:54.374742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:19:54.374775Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:54.375103Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2026-01-10T12:19:54.375340Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:19:54.375403Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 Check that tablet 72075186224037891 was deleted 2026-01-10T12:19:54.376787Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] >> TFlatTest::MiniKQLRanges >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2026-01-10T12:19:50.902905Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706579953481011:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:50.903052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:51.143959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.173880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:51.173975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:51.242624Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706579953480987:2081] 1768047590902110 != 1768047590902113 2026-01-10T12:19:51.246319Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:51.260736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:51.346831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.359680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2026-01-10T12:19:51.359710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:51.359723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:51.359839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:51.520834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:51.613192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.915494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TDataShardLocksTest::Points_OneTx >> TLocksTest::BrokenLockUpdate |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TLocksTest::SetEraseSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2026-01-10T12:19:47.958543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706567243632181:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.958597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:48.191545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.197366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:48.197507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:48.242401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-10T12:19:48.280029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:48.302825Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706567243632153:2081] 1768047587956641 != 1768047587956644 2026-01-10T12:19:48.477971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.491957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:48.556398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.968540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:52.707641Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7593706588718472427:4332] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2026-01-10T12:19:52.707716Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593706588718472427:4332] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2026-01-10T12:19:53.234379Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706590473891488:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:53.234485Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:53.281868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.370559Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:53.371721Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706590473891466:2081] 1768047593231544 != 1768047593231547 2026-01-10T12:19:53.403083Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:53.403183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:53.415858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:53.568014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.598119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:53.621573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.264535Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:58.093275Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7593706607653764451:4335] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2026-01-10T12:19:58.093353Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7593706607653764451:4335] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2026-01-10T12:19:58.233589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706590473891488:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.234192Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2026-01-10T12:19:52.991566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706585692927597:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:52.992003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:53.182419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.213460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:53.213561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:53.269645Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706585692927488:2081] 1768047592988010 != 1768047592988013 2026-01-10T12:19:53.279796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:53.281709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:53.339449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.486341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:53.569257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.277170Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706604982125108:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:56.277606Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:56.300482Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.367988Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706604982124986:2081] 1768047596271396 != 1768047596271399 2026-01-10T12:19:56.372876Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:56.410292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.410391Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.413600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.498576Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.599041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.608988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:56.624836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TFlatTest::SplitThenMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2026-01-10T12:19:49.802838Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706575385851353:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:49.803105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:50.032240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:19:50.041652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:50.041969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:50.077294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:50.176203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:50.237553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.367906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:50.447164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.582965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.627721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.667287Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715662: Validate (783): Key validation status: 3 2026-01-10T12:19:50.667394Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7593706579680819688:2693] txid# 281474976715662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:19:50.667491Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7593706579680819688:2693] txid# 281474976715662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:19:50.667539Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7593706579680819688:2693] txid# 281474976715662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-10T12:19:50.670737Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715663: Validate (783): Key validation status: 3 2026-01-10T12:19:50.670837Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7593706579680819710:2700] txid# 
281474976715663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:19:50.670895Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7593706579680819710:2700] txid# 281474976715663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:19:50.670921Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7593706579680819710:2700] txid# 281474976715663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-10T12:19:50.673922Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715664: Validate (783): Key validation status: 3 2026-01-10T12:19:50.673998Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7593706579680819717:2704] txid# 281474976715664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:19:50.674050Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7593706579680819717:2704] txid# 281474976715664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:19:50.674087Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7593706579680819717:2704] txid# 281474976715664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-10T12:19:50.676691Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715665: Validate (783): Key validation status: 3 2026-01-10T12:19:50.676822Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7593706579680819723:2707] txid# 281474976715665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-10T12:19:50.676896Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7593706579680819723:2707] txid# 281474976715665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-10T12:19:50.676918Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7593706579680819723:2707] txid# 281474976715665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-10T12:19:52.931936Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706587272715144:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:52.932027Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:52.951152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.027079Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706587272715116:2081] 1768047592930416 != 
1768047592930419 2026-01-10T12:19:53.040844Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:53.057276Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:53.057371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:53.061406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:53.202198Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.257006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:53.262846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:53.276005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:53.332232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:53.382833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.504752Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706606662788449:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:56.504842Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:56.541714Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.638556Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:56.641811Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706606662788422:2081] 1768047596503611 != 1768047596503614 
2026-01-10T12:19:56.664111Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.664192Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.682367Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.706214Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.848569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.867303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.924152Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.966268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMiniKQLEngineFlatTest::TestPureProgram >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow |91.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TPQTest::TestPQReadAhead [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TPQTest::TestOwnership >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TFlatTest::RejectByPerRequestSize >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown 
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TMiniKQLEngineFlatTest::TestLengthPushdown |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TFlatTest::MergeEmptyAndWrite [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2026-01-10T12:19:56.253723Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706604781682775:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:56.254284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:56.521761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.521869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.525421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.580615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.643647Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:56.645624Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706604781682747:2081] 1768047596251638 != 1768047596251641 2026-01-10T12:19:56.742266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.832389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.836737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:56.880661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.002970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706604781683654:2564] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-10T12:19:59.756451Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706617783139376:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:59.756509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:59.766682Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:59.858624Z node 2 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:59.858758Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:59.860810Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:59.862093Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706617783139349:2081] 1768047599754840 != 1768047599754843 2026-01-10T12:19:59.876152Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:59.877162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.106940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:00.114715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.123311Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.167623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:20:00.182456Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706622078107596:2595] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. 
Only unfreeze alter is allowed 2026-01-10T12:20:00.186531Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:20:00.199037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TLocksTest::UpdateLockedKey [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TPQTest::TestWritePQ [GOOD] >> TLocksTest::SetLockNothing >> TPQTest::TestWriteOffsetWithBigMessage >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2026-01-10T12:19:56.115591Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706606181726790:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:56.115809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:56.356802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.379892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.380011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.405426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.481083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:56.488388Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706606181726686:2081] 1768047596111198 != 1768047596111201 2026-01-10T12:19:56.556574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:56.556597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:56.556606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:56.556721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:56.587377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.757013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.832487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.121672Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:00.061564Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706619794390197:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.061623Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.082557Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.149261Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:00.150478Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706619794390159:2081] 1768047600060202 != 1768047600060205 2026-01-10T12:20:00.181868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:00.181943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:00.186621Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:00.233435Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:00.233470Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:00.233478Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:00.233573Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:00.266167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.430487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:00.456195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.915499Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7593706619794391699:2471], Recipient [2:7593706619794391044:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to 
Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2026-01-10T12:20:00.915536Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-10T12:20:00.915775Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:20:00.915979Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2026-01-10T12:20:00.916024Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2026-01-10T12:20:00.916054Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2026-01-10T12:20:00.916110Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2026-01-10T12:20:00.916139Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2026-01-10T12:20:00.916226Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2026-01-10T12:20:00.925826Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7593706619794391703:2472], Recipient [2:7593706619794391044:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2026-01-10T12:20:00.925870Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-10T12:20:00.926016Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:20:00.926205Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2026-01-10T12:20:00.926262Z node 2 :TX_DATASHARD TRACE: 
datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2026-01-10T12:20:00.926340Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2026-01-10T12:19:56.935690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706606423904143:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:56.937064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.145283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.152502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:57.152610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:57.213549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:57.280975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706606423904115:2081] 1768047596932875 != 1768047596932878 2026-01-10T12:19:57.283650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:57.419472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.491692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:57.502756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:57.584829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.941735Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:00.260167Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706623626784355:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.260229Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.299287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.370771Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:00.391681Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:00.391813Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:00.394164Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:00.483971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.615490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:00.620588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:00.632976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for Tab ... t: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:84:2115] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:87:2057] recipient: [35:84:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:86:2116] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:202:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:83:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:85:2115] Leader for TabletID 72057594037927937 is [36:87:2116] sender: [36:88:2057] recipient: [36:85:2115] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:203:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:86:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:89:2057] recipient: [45:88:2118] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:91:2057] recipient: [45:88:2118] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! 
new actor is[45:90:2119] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:206:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:86:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:89:2057] recipient: [46:88:2118] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:91:2057] recipient: [46:88:2118] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:90:2119] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:206:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:87:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:90:2057] recipient: [47:89:2118] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:92:2057] recipient: [47:89:2118] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:91:2119] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:207:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2026-01-10T12:19:54.849585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706596162814968:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:54.849681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:55.068171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.072274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:55.072428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:55.128613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:55.130931Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:55.131991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706596162814943:2081] 1768047594848687 != 1768047594848690 2026-01-10T12:19:55.303598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:55.319734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:19:55.332005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.381610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , opId: 281474976715659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2026-01-10T12:19:55.385662Z node 
1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706600457783046:2499] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-10T12:19:58.162085Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706613690813677:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.162174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:58.194824Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.334365Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:58.335549Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:58.335610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:58.336233Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706613690813550:2081] 1768047598157250 != 1768047598157253 2026-01-10T12:19:58.362053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:58.463301Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.562901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.588089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.770449Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:58.777873Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:58.801212Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put 
Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:58.805684Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:58.837830Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.839950Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.840142Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.841517Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.841654Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.842222Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.843013Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.843128Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.843584Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2026-01-10T12:19:58.844558Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.844675Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.845098Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.845737Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.846026Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.846413Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.847095Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.847197Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.847591Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2026-01-10T12:19:58.848194Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.848484Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.848838Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.849322Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.849399Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: 
TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.849727Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2026-01-10T12:19:58.850412Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.850509Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.851334Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.851826Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.851910Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.852323Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2026-01-10T12:19:58.853000Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2026-01-10T12:19:58.853099Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:19:58.853540Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2026-01-10T12:19:58.854149Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:19:58.854182Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:19:58.854389Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:19:58.854784Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037 ... 
:TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7593706613690814402 RawX2: 4503608217307375 } Origin: 72075186224037889 State: 5 TxId: 281474976710693 Step: 0 Generation: 1 2026-01-10T12:19:59.307354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710693:0, shardIdx: 72057594046644480:2, shard: 72075186224037889, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307426Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 129 -> 240 2026-01-10T12:19:59.307608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307755Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037889 state PreOffline 2026-01-10T12:19:59.307782Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:19:59.307789Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:19:59.307878Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037894 state PreOffline 2026-01-10T12:19:59.307902Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-10T12:19:59.308031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:19:59.308162Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-10T12:19:59.308176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 
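The captured stderr in this report is emitted as one continuous stream, so individual schemeshard and datashard entries run together. Below is a minimal sketch of an offline helper that splits such a chunk back into one entry per line, assuming every entry starts with the ISO-8601 timestamp / "node N" / ":COMPONENT LEVEL:" prefix visible here; the script and its names are illustrative only and are not part of ya or YDB.

import re

# Each entry in this report starts with an ISO-8601 timestamp, e.g.
# "2026-01-10T12:19:59.308188Z node 2 :FLAT_TX_SCHEMESHARD INFO: ...".
TS = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z"
ENTRY = re.compile(
    "(?P<ts>" + TS + r") node (?P<node>\d+) "
    r":(?P<component>[A-Z_]+) (?P<level>[A-Z]+): "
    r"(?P<message>.*?)(?=" + TS + "|$)",
    re.S,
)

def split_entries(blob):
    """Yield (timestamp, node, component, level, message) tuples from a
    run-together chunk of captured stderr."""
    for m in ENTRY.finditer(blob):
        yield (m["ts"], int(m["node"]), m["component"], m["level"],
               " ".join(m["message"].split()))

if __name__ == "__main__":
    import sys
    for ts, node, comp, level, msg in split_entries(sys.stdin.read()):
        print(ts, "n%d" % node, comp, level, msg[:120])

Feeding a saved chunk of this report through stdin prints one trimmed entry per line, which makes long stretches such as the propose/release/restore loop above considerably easier to follow.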
2026-01-10T12:19:59.308188Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-10T12:19:59.308198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2026-01-10T12:19:59.308207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710693, ready parts: 1/1, is published: true 2026-01-10T12:19:59.308243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7593706617985782550:2416] message: TxId: 281474976710693 2026-01-10T12:19:59.308262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2026-01-10T12:19:59.308278Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-01-10T12:19:59.308287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710693:0 2026-01-10T12:19:59.308367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:19:59.308962Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:19:59.309022Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-10T12:19:59.310432Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:19:59.310488Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state 2026-01-10T12:19:59.311766Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:59.312098Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:19:59.312465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706613690814402 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-10T12:19:59.312517Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:59.312802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:59.312838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:59.313367Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult 
datashard 72075186224037889 state Offline 2026-01-10T12:19:59.313533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706617985782318 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4 2026-01-10T12:19:59.313569Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:19:59.313833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:19:59.313862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:19:59.314112Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2026-01-10T12:19:59.314509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:19:59.314744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:19:59.314873Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:19:59.314902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-10T12:19:59.314916Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037894 reason = ReasonStop 2026-01-10T12:19:59.315019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:19:59.315195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:19:59.315223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:19:59.315270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:19:59.315497Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7593706613690814523:2594], serverId# [2:7593706613690814524:2595], sessionId# [0:0:0] 2026-01-10T12:19:59.315499Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:19:59.315532Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-10T12:19:59.315579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046644480:2 2026-01-10T12:19:59.315600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:19:59.315633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-10T12:19:59.315647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-10T12:19:59.315698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:19:59.315931Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:19:59.316011Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:19:59.317611Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037894 2026-01-10T12:19:59.317671Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037894 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
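The TKeyValueTest output that follows repeats one pattern: the harness announces "!Reboot <tablet> (actor [...]) on event <Event> !", restarts tablet 72057594037927937, reports "rebooted!" and "tablet resolver refreshed!", and names the new leader actor. A small sketch for summarising which events triggered injected restarts in a chunk like this one is shown below; the regexes and function names are assumptions based only on the text of these lines, not on the test framework itself.

import re
from collections import Counter

# "!Reboot 72057594037927937 (actor [2:58:2099]) on event
#  NKikimr::TEvKeyValue::TEvRequest !" marks an injected tablet restart.
REBOOT = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) on event (\S+) !")
DONE = re.compile(r"!Reboot \d+ \(actor \[[^\]]+\]\) rebooted!")

def reboot_summary(text):
    """Count injected restarts per intercepted event type."""
    return Counter(event for _tablet, event in REBOOT.findall(text))

if __name__ == "__main__":
    import sys
    text = sys.stdin.read()
    injected = reboot_summary(text)
    completed = len(DONE.findall(text))
    for event, n in injected.most_common():
        print("%3d  %s" % (n, event))
    print("completed reboots: %d / %d" % (completed, sum(injected.values())))

On a chunk like the one above this would list events such as NKikimr::TEvTabletPipe::TEvServerConnected, NKikimr::TEvKeyValue::TEvRequest and NKikimr::TEvKeyValue::TEvIntermediate together with how often each was used as a reboot point.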
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:52:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2117] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:91:2057] recipient: [36:88:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2118] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:52:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:105:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:85:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:88:2057] recipient: [45:87:2117] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:90:2057] recipient: [45:87:2117] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! 
new actor is[45:89:2118] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:205:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:85:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:88:2057] recipient: [46:87:2117] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:90:2057] recipient: [46:87:2117] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:89:2118] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:205:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:86:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:89:2057] recipient: [47:88:2117] Leader for TabletID 72057594037927937 is [47:90:2118] sender: [47:91:2057] recipient: [47:88:2117] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2026-01-10T12:19:58.876325Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706613985541886:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.876445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:59.130024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.139017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:59.139122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:59.188148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:59.245299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:59.248654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706613985541782:2081] 1768047598868924 != 1768047598868927 2026-01-10T12:19:59.345901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.418806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:59.493476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.931080Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706625823216262:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:01.931637Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:01.946411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.006682Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:02.008423Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706625823216234:2081] 1768047601929322 != 1768047601929325 2026-01-10T12:20:02.048489Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:02.048561Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:02.054867Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:02.179803Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.262109Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:02.266883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:02.281911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.373737Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:20:02.378922Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:20:02.407296Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:20:02.410666Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:20:02.434889Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.436893Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.437076Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.438516Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.438660Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.439233Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.440049Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.440358Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.440839Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-10T12:20:02.441556Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.441657Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.442064Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.442738Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.442829Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.443227Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-10T12:20:02.444241Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.444329Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.444754Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.445456Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.445540Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.445939Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-10T12:20:02.446592Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.446682Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.447087Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.447704Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.447794Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.448210Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 
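Both node-2 traces in this report show the same loop: tx 281474976710676 in the first trace and tx 281474976715676 in this one repeatedly log "released its data" and then "restored its data" on tablets 72075186224037888 and 72075186224037889 before TTxProposeTransactionBase finally reaches Complete. This looks like the transaction being suspended and re-attached several times, though the log alone does not say why. The sketch below counts those cycles per transaction and tablet; the two pattern strings are taken verbatim from the entries above, everything else is an illustrative assumption.

import re
from collections import Counter

# "tx <id> released its data" and "tx <id> at <tablet> restored its data"
# are the two messages that bracket each suspend/resume cycle in this trace.
RELEASED = re.compile(r"tx (\d+) released its data")
RESTORED = re.compile(r"tx (\d+) at (\d+) restored its data")

def tx_memory_cycles(text):
    """Tally release and restore messages per transaction (and per tablet
    for restores) in a chunk of captured datashard stderr."""
    released = Counter(RELEASED.findall(text))
    restored = Counter(RESTORED.findall(text))
    return released, restored

if __name__ == "__main__":
    import sys
    released, restored = tx_memory_cycles(sys.stdin.read())
    for (tx, tablet), n in sorted(restored.items()):
        print("tx %s restored %3d times on tablet %s" % (tx, n, tablet))
    for tx, n in sorted(released.items()):
        print("tx %s released %3d times in total" % (tx, n))

A large restore count concentrated on one transaction is a quick way to spot which test spends its time in this loop without reading every entry.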
2026-01-10T12:20:02.448834Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.448919Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.449282Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.449888Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.449969Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.450365Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-10T12:20:02.450996Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-10T12:20:02.451078Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:02.451450Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-10T12:20:02.451999Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:20:02.452028Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:20:02.452193Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:02.452599Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-10T12:20:02.453056Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:20:02.453071Z node 2 :TX_DATASHARD DEBUG: cdc_st ... 
on.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2026-01-10T12:20:02.720071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1768047602762 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 844 } } CommitVersion { Step: 1768047602762 TxId: 281474976715687 } 2026-01-10T12:20:02.720102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2026-01-10T12:20:02.720200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1768047602762 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 844 } } CommitVersion { Step: 1768047602762 TxId: 281474976715687 } 2026-01-10T12:20:02.720313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1768047602762 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 844 } } CommitVersion { Step: 1768047602762 TxId: 281474976715687 } debug: NTableState::TProposedWaitParts operationId# 281474976715687:0 2026-01-10T12:20:02.720509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706630118184671 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-10T12:20:02.720527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2026-01-10T12:20:02.720607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706630118184671 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-10T12:20:02.720653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2026-01-10T12:20:02.721391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7593706630118184671 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-10T12:20:02.721445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: 
CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, shard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-10T12:20:02.721461Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-10T12:20:02.721475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-10T12:20:02.721496Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715687:0 129 -> 240 2026-01-10T12:20:02.721688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-10T12:20:02.721733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-10T12:20:02.721759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2026-01-10T12:20:02.721801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-10T12:20:02.721982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-10T12:20:02.722051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-10T12:20:02.722076Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:20:02.722258Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2026-01-10T12:20:02.722318Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-10T12:20:02.722342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:20:02.722469Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-10T12:20:02.722486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-10T12:20:02.722503Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-10T12:20:02.722513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-10T12:20:02.722531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, 
TxId: 281474976715687, ready parts: 1/1, is published: true 2026-01-10T12:20:02.722569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7593706630118184895:2381] message: TxId: 281474976715687 2026-01-10T12:20:02.722587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-10T12:20:02.722605Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715687:0 2026-01-10T12:20:02.722615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715687:0 2026-01-10T12:20:02.722678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:20:02.723384Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:20:02.723446Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-10T12:20:02.724871Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:20:02.725174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593706630118184671 RawX2: 4503608217307432 } TabletId: 72075186224037890 State: 4 2026-01-10T12:20:02.725244Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:20:02.725607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:20:02.725630Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:20:02.725647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:20:02.727229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:20:02.727452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:20:02.727643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:20:02.727660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:20:02.727725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:20:02.727916Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:20:02.728294Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:20:02.728491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:20:02.728515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:20:02.728535Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:20:02.728560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:20:02.728602Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TFlatTest::RejectByPerRequestSize [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] Test command err: 2026-01-10T12:18:24.315570Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706207599627806:2201];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.315680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:24.347278Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:24.393553Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706211399767500:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:18:24.393610Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:18:24.445993Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:18:24.601268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:24.602045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:18:24.731810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:24.731918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:18:24.732823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:18:24.732919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:18:24.774520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:24.799627Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:18:24.801726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:18:24.927579Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:24.928055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:18:24.943740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:18:24.952249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/003b30/r3tmp/yandextVG5d7.tmp 2026-01-10T12:18:24.952278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/003b30/r3tmp/yandextVG5d7.tmp 2026-01-10T12:18:24.952497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/003b30/r3tmp/yandextVG5d7.tmp 2026-01-10T12:18:24.952627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:18:25.005380Z INFO: TTestServer started on Port 9710 GrpcPort 6233 PQClient connected to localhost:6233 2026-01-10T12:18:25.288854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:18:25.317803Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:25.430952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:18:25.442676Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:18:27.771180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706224284669720:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:27.771180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706224284669711:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:27.771276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:27.771535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706224284669726:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:27.771599Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-01-10T12:18:27.776445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-01-10T12:18:27.796544Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706224284669725:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2026-01-10T12:18:27.873224Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706224284669754:2182] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2026-01-10T12:18:28.224836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
2026-01-10T12:18:28.231037Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593706224284669761:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:28.232542Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=Nzc5YjFkZjAtNGE1NjE3YmQtYjU0MWE1NjgtNTVhNGQ2MjA=, ActorId: [2:7593706224284669709:2305], ActorState: ExecuteState, LegacyTraceId: 01kekxfy9s6n08gkekxrpttzxx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:28.235045Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:18:28.237431Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706220484531061:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:18:28.239788Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmNkOWFkYjctYWM0OTE1MTctZDE4YjYzNzEtZjBkMjE4MTM=, ActorId: [1:7593706220484531011:2329], ActorState: ExecuteState, LegacyTraceId: 01kekxfydma3m4pyaf3mq2y039, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:18:28.240172Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' becau ... r_table.cpp:187) 2026-01-10T12:20:01.195876Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:20:01.195907Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:01.469493Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:20:02.029733Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3059208687299993691, "Root", "00595F536F757263655F37", 1768047602719, 1768047602719, 0, 13); 2026-01-10T12:20:02.903622Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-10T12:20:02.903649Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, 
AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-10T12:20:02.903658Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-10T12:20:02.903684Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-10T12:20:02.903833Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7593706629808044087:4185], Recipient [11:7593706612628173964:3624]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [11:7593706629808044086:4185] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-10T12:20:02.903920Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7593706629808044086:4185], Recipient [11:7593706612628173964:3624]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "Y_Source_7" 2026-01-10T12:20:02.903996Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [11:7593706612628173964:3624], Recipient [11:7593706629808044086:4185]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-10T12:20:02.904034Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=Y_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-10T12:20:02.904119Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7593706629808044086:4185], Recipient [11:7593706612628173964:3624]: NActors::TEvents::TEvPoison 2026-01-10T12:20:02.904642Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [11:7593706561088564068:2073], Recipient [11:7593706629808044086:4185]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-10T12:20:02.904676Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) StartKqpSession 2026-01-10T12:20:02.907374Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [11:7593706561088564220:2211], Recipient [11:7593706629808044086:4185]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=11&id=ZDlhMmRlNWUtYjUzZTcwNDEtYjQ0N2NkZTYtMzM3MzhhZDQ=" NodeId: 11 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-10T12:20:02.907406Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, 
PreferedPartition=(NULL)) Select from the table 2026-01-10T12:20:03.327912Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [11:7593706561088564220:2211], Recipient [11:7593706629808044086:4185]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=ZDlhMmRlNWUtYjUzZTcwNDEtYjQ0N2NkZTYtMzM3MzhhZDQ=" PreparedQuery: "4e7ad9e3-d0a794c6-88bb289b-b385be4d" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kekxjvdj8phpxsjh9fawk6g1" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1768047602719 } items { uint64_value: 1768047602719 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS 2026-01-10T12:20:03.328151Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-10T12:20:03.328191Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:151: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2026-01-10T12:20:03.328358Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7593706634103011448:4185], Recipient [11:7593706612628173963:3623]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [11:7593706629808044086:4185] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-10T12:20:03.328484Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271187968, Sender [11:7593706629808044086:4185], Recipient [11:7593706612628173963:3623]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000Y_Source_7" } PipeClient { RawX1: 7593706634103011448 RawX2: 47244644441 } } 2026-01-10T12:20:03.328542Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-10T12:20:03.328625Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7593706629808044086:4185], Recipient [11:7593706612628173963:3623]: NActors::TEvents::TEvPoison 2026-01-10T12:20:03.328678Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7593706634103011449:4185], Recipient [11:7593706612628173965:3625]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1002 Status: OK ServerId: [11:7593706629808044086:4185] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-10T12:20:03.328745Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7593706629808044086:4185], Recipient 
[11:7593706612628173965:3625]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 2 2026-01-10T12:20:03.328819Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [11:7593706612628173965:3625], Recipient [11:7593706629808044086:4185]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-10T12:20:03.328865Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Update the table 2026-01-10T12:20:03.329096Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7593706629808044086:4185], Recipient [11:7593706612628173965:3625]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2 2026-01-10T12:20:03.477753Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [11:7593706561088564220:2211], Recipient [11:7593706629808044086:4185]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=ZDlhMmRlNWUtYjUzZTcwNDEtYjQ0N2NkZTYtMzM3MzhhZDQ=" PreparedQuery: "6d202b0f-6ca37566-ba574e39-207f2e99" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 84 2026-01-10T12:20:03.477813Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-10T12:20:03.477853Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=2, SeqNo=157 2026-01-10T12:20:03.477881Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7593706629808044086:4185] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3059208687299993691 AND Topic = "Root" AND ProducerId = "00595F536F757263655F37" |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] 
>> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: 2026-01-10T12:19:29.402100Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:19:29.484770Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:29.484861Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:29.484949Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.485030Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:19:29.508378Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: 
[PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.532161Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:19:29.533275Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:19:29.535609Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:19:29.538721Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:19:29.540365Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:19:29.548413Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.548931Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b6a51502-fe03f644-dea74226-76e8d5d9_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.569825Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.570409Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|63cbd1c0-6e73d856-b6abe58f-c7f20fee_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.614886Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.615521Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|43a689e7-64c5fc52-bd7bdbca-377b73fa_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:244:2057] recipient: [1:103:2137] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:247:2057] recipient: [1:246:2242] Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:249:2057] recipient: [1:246:2242] 2026-01-10T12:19:29.710596Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:29.710679Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:29.712491Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.712560Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: 
[PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:19:29.713352Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:297:2243] 2026-01-10T12:19:29.715961Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:298:2243] 2026-01-10T12:19:29.724629Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:19:29.725130Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:19:29.725560Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:297:2243] 2026-01-10T12:19:29.725806Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:298:2243] 2026-01-10T12:19:29.734107Z node 1 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-10T12:19:29.736977Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:326:2057] recipient: [1:14:2061] Got start offset = 0 2026-01-10T12:19:30.237663Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-10T12:19:30.296611Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:30.296685Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:30.296749Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:30.296836Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:181:2057] recipient: [2:14:2061] 2026-01-10T12:19:30.319122Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:30.320155Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-10T12:19:30.320938Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:187:2142] 2026-01-10T12:19:30.323784Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:187:2142] 2026-01-10T12:19:30.325845Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:188:2142] 2026-01-10T12:19:30.327994Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:188:2142] 2026-01-10T12:19:30.336008Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:30.336540Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a0629adc-2096706-8a1b6d99-82fd6c5a_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:30.357236Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:30.357822Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|25fe33df-436d1409-f01fd39-76a764_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:30.404150Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:30.404746Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6f1b7c23-9f2dd97c-38089548-e1784bcd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:241:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:244:2057] recipient: [2:243:2239] Leader for TabletID 72057594037927937 is [2:245:2240] sender: [2:246:2057] recipient: [2:243:2239] 2026-01-10T12:19:30.488428Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:30.488511Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:30.489432Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:30.489499Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10 ... 
594037927937] Config applied version 55 actor [55:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 } 2026-01-10T12:20:07.623921Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:189:2142] 2026-01-10T12:20:07.626978Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:189:2142] 2026-01-10T12:20:07.629072Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:190:2142] 2026-01-10T12:20:07.631384Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:190:2142] 2026-01-10T12:20:07.642047Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:07.642478Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dc34799a-dd897d98-9182c547-dc819315_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:07.662182Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:07.662806Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a8eb2dfc-9de6c4fb-affb3075-ec469066_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:07.707755Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:07.708371Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2a620819-e861dfb-523813f9-c5e1fa20_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:244:2057] recipient: [55:103:2137] Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:247:2057] recipient: [55:246:2242] Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:249:2057] recipient: [55:246:2242] 2026-01-10T12:20:07.799195Z node 55 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:07.799269Z node 55 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:07.800111Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:07.800167Z node 55 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:07.801269Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:297:2243] 2026-01-10T12:20:07.803948Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:298:2243] 2026-01-10T12:20:07.813775Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:07.814333Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:07.815016Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2243] 2026-01-10T12:20:07.815317Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2243] 2026-01-10T12:20:07.824501Z node 55 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-10T12:20:07.827549Z node 55 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 55 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:326:2057] recipient: [55:14:2061] Got start offset = 0 2026-01-10T12:20:08.191891Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:108:2057] recipient: [56:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:108:2057] recipient: [56:106:2138] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:113:2057] recipient: [56:106:2138] 2026-01-10T12:20:08.258424Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:08.258474Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:08.258512Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:08.258557Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:154:2057] recipient: [56:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:154:2057] recipient: [56:152:2172] Leader for TabletID 72057594037927938 is [56:158:2176] sender: [56:159:2057] recipient: [56:152:2172] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:184:2057] recipient: [56:14:2061] 2026-01-10T12:20:08.277146Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:08.278035Z node 56 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 56 actor [56:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 } 2026-01-10T12:20:08.278810Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:190:2142] 2026-01-10T12:20:08.281730Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [56:190:2142] 2026-01-10T12:20:08.283644Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:191:2142] 2026-01-10T12:20:08.285879Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [56:191:2142] 2026-01-10T12:20:08.293168Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:08.293534Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1087af3b-362ddd7a-df8d30a4-6bec52dc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:08.311027Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:08.311504Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3532fca2-2facba15-6b8407d-357c0ded_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:08.354585Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:08.355082Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4af65f1f-be07abe9-9ef95a38-b2e51cf3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:245:2057] recipient: [56:104:2137] Leader for TabletID 72057594037927937 is [56:112:2142] sender: [56:248:2057] recipient: [56:247:2242] Leader for TabletID 72057594037927937 is [56:249:2243] sender: [56:250:2057] recipient: [56:247:2242] 2026-01-10T12:20:08.433313Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:08.433373Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:08.434118Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:08.434166Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:08.435097Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:298:2243] 2026-01-10T12:20:08.437704Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:299:2243] 2026-01-10T12:20:08.446752Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: 
[rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:08.447252Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:08.447886Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [56:298:2243] 2026-01-10T12:20:08.448138Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [56:299:2243] 2026-01-10T12:20:08.455819Z node 56 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-10T12:20:08.458299Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [56:249:2243] sender: [56:327:2057] recipient: [56:14:2061] Got start offset = 0 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2026-01-10T12:19:48.054229Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706568814749083:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:48.055419Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:48.260205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.345481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:48.345593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:48.351089Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:48.362434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:48.463746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.553832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:48.615606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:49.060338Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:53.053496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706568814749083:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:53.053566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:19:55.135310Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002309 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-01-10T12:19:55.135405Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002309 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-01-10T12:19:55.135528Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7593706598879521968:3120] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-01-10T12:19:55.861074Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706602001543806:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:55.861131Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:55.880723Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.969550Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:55.970997Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706602001543777:2081] 1768047595860060 != 1768047595860063 2026-01-10T12:19:56.007643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.007731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.018249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.151943Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.218601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.224657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:56.242084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.868370Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:00.861568Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706602001543806:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.861672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:03.141760Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002245 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-01-10T12:20:03.141835Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002245 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-01-10T12:20:03.141954Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7593706636361284002:3121] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-01-10T12:20:03.866279Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706634047739518:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:03.866331Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:03.880199Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.955392Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:03.957569Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706634047739489:2081] 1768047603865221 != 1768047603865224 2026-01-10T12:20:04.003122Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.003254Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.008404Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:20:04.105235Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.214979Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:04.234794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.878216Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:07.011480Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7593706634047739738:2104] Handle TEvProposeTransaction 2026-01-10T12:20:07.011531Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7593706634047739738:2104] TxId# 281474976715700 ProcessProposeTransaction 2026-01-10T12:20:07.011583Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [3:7593706634047739738:2104] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7593706651227609976:2797] DataReq marker# P0 2026-01-10T12:20:07.011665Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7593706651227609976:2797] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2026-01-10T12:20:07.012392Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7593706651227609976:2797] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-01-10T12:20:07.012418Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7593706651227609976:2797] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-01-10T12:20:07.012476Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7593706651227609976:2797] txid# 281474976715700 SEND to# [3:7593706634047739774:2121] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2026-01-10T12:20:07.012628Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7593706651227609976:2797] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-10T12:20:07.014161Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7593706651227609976:2797] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-10T12:20:07.014487Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7593706651227609976:2797] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-10T12:20:07.015788Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-10T12:20:07.017075Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2026-01-10T12:20:07.017399Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:20:07.018422Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2026-01-10T12:20:07.019046Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-10T12:20:07.019206Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7593706651227609976:2797] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000463 out readset size 0 marker# P6 2026-01-10T12:20:07.019518Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:20:07.019659Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7593706651227609976:2797] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000855 out readset size 0 marker# P6 2026-01-10T12:20:07.019730Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7593706651227609976:2797] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001318 exceeded limit 10000 Status# ExecError 2026-01-10T12:20:07.019807Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7593706651227609976:2797] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2026-01-10T12:20:07.019903Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2026-01-10T12:20:07.019961Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 2026-01-10T12:20:07.020505Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2026-01-10T12:20:07.020547Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> 
TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> KqpCost::WriteRow-isSink-isOlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot >> KqpCost::WriteRow-isSink+isOlap [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf |91.9%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TLocksTest::Range_BrokenLock1 [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TLocksTest::Range_EmptyKey [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling >> TDataShardLocksTest::UseLocksCache [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling >> TLocksTest::BrokenSameShardLock [GOOD] >> TLocksTest::BrokenLockErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:08.837044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:08.837146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.837179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:08.837210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:08.837243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:08.837271Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:08.837330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.837386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:08.838254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:08.838515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:08.918994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:08.919061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:08.934267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:08.934342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:08.934498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:08.940915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:08.941120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:08.941757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:08.942028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:08.943903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:08.944051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:08.945079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:08.945124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:08.945165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:08.945203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:08.945235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:08.945450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.053052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.056959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.057001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.057046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR 
... eshard: 72057594046678944, cookie: 281474976710672 2026-01-10T12:20:12.176653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-10T12:20:12.176753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [3:215:2216] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-10T12:20:12.176806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [3:215:2216] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-10T12:20:12.182792Z node 3 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-10T12:20:12.192804Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:12.193019Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.193222Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:20:12.193269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:20:12.193453Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:20:12.193520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:12.195336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:12.195498Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:20:12.195699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.195759Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:20:12.195802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:20:12.195836Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-10T12:20:12.197398Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.197459Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:20:12.197500Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:20:12.198895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.198946Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.198995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:20:12.199048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:20:12.199175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:20:12.200514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:20:12.200663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:20:12.200951Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:12.201060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 12884904048 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:12.201111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:20:12.201454Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:20:12.201521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:20:12.201701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2026-01-10T12:20:12.201775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:12.203451Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:12.203503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:12.203689Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:12.203737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:20:12.204108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.204163Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:20:12.204270Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:20:12.204310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:20:12.204353Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:20:12.204388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:20:12.204429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:20:12.204474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:20:12.204515Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:20:12.204549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:20:12.204615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:20:12.204677Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:20:12.204712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:20:12.205270Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:20:12.205371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:20:12.205412Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:20:12.205464Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:20:12.205510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:20:12.205596Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:20:12.208095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:20:12.208564Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TLocksTest::BrokenDupLock >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] >> KqpCost::VectorIndexLookup+useSink >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> KqpCost::IndexLookupJoin-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:20:03.420586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:03.593475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:03.610083Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:03.610993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:03.611167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:04.144245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.145375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.150939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047600417559 != 1768047600417562 2026-01-10T12:20:04.164210Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:04.216781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:04.323436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:04.625978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:04.640105Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:04.754715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.808347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:20:04.809619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:20:04.810683Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-10T12:20:04.810974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:20:04.827070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:20:04.860263Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:20:04.860727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:20:04.861043Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-10T12:20:04.861274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:20:04.869386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:20:04.869972Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:20:04.870100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:20:04.875511Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:20:04.875675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:20:04.875786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:20:04.877513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:20:04.877875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:20:04.877992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-10T12:20:04.878518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:20:04.878627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:20:04.880258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:20:04.880309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:20:04.880343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:20:04.880677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:20:04.880758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:20:04.880810Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-10T12:20:04.891766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:20:04.914971Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:20:04.915207Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:20:04.915322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-10T12:20:04.915355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:20:04.915387Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:20:04.915435Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:20:04.915707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:20:04.915754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:20:04.916611Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:20:04.916669Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:20:04.916738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:20:04.916793Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-10T12:20:04.916818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:20:04.916842Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:20:04.916870Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:20:04.917192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:20:04.917234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:20:04.917366Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:20:04.917998Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:20:04.918134Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:20:04.918706Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:20:04.919332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:20:04.919398Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:20:04.919438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:20:04.919467Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:20:04.919524Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:20:04.919582Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:20:04.919632Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets 
to remove in 72075186224037889 2026-01-10T12:20:04.919738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:04.919790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:04.920272Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:885:2764], serverId# [1:896:2771], sessionId# [0:0:0] 2026-01-10T12:20:04.920316Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:20:04.920339Z node 1 :TX_DAT ... 24037888 is Executed 2026-01-10T12:20:11.850957Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:20:11.851005Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037888 has finished 2026-01-10T12:20:11.861828Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:20:11.861903Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:20:11.861942Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:20:11.861996Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:20:11.862082Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:20:11.862235Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:1151:2949], Recipient [2:894:2770]: {TEvReadSet step# 3500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-10T12:20:11.862272Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:20:11.862306Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-10T12:20:12.002408Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1193:2975], Recipient [2:1151:2949]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:20:12.002582Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:20:12.002662Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-10T12:20:12.002770Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:20:12.002814Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:20:12.002857Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:20:12.002894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:20:12.002999Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-10T12:20:12.003040Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:20:12.003064Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:20:12.003085Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:20:12.003110Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:20:12.003226Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:20:12.003491Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-10T12:20:12.003542Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1193:2975], 0} after executionsCount# 1 2026-01-10T12:20:12.003591Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1193:2975], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:20:12.003671Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1193:2975], 0} finished in read 2026-01-10T12:20:12.003746Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:20:12.003771Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:20:12.003810Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:20:12.003837Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:20:12.003881Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:20:12.003904Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:20:12.003926Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T12:20:12.003962Z node 2 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:20:12.004063Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:20:12.004925Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1193:2975], Recipient [2:1151:2949]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:20:12.004982Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:20:12.005146Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1193:2975], Recipient [2:894:2770]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-10T12:20:12.005240Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:20:12.005287Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2026-01-10T12:20:12.005337Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-10T12:20:12.005360Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:20:12.005387Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:20:12.005424Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:20:12.005465Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2026-01-10T12:20:12.005492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-10T12:20:12.005514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:20:12.005533Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:20:12.005553Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:20:12.005632Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-10T12:20:12.005806Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-10T12:20:12.005841Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1193:2975], 1} after executionsCount# 1 2026-01-10T12:20:12.005874Z node 2 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1193:2975], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:20:12.005936Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1193:2975], 1} finished in read 2026-01-10T12:20:12.005975Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-10T12:20:12.005996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:20:12.006065Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:20:12.006088Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:20:12.006131Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-10T12:20:12.006153Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:20:12.006174Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037889 has finished 2026-01-10T12:20:12.006198Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:20:12.006263Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:20:12.007343Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1193:2975], Recipient [2:894:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:20:12.007392Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:08.942167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:08.942252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.942293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:08.942324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using 
default configuration 2026-01-10T12:20:08.942391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:08.942426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:08.942498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.942570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:08.943456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:08.943755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.021393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.021459Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.039665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.039757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.039926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.047083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.047276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.047744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.047901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.049709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.049845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.050750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.050810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.050898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.050942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.050973Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.051192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.219813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.220946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221502Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.221973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.222051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... , tablet: 72075186233409548, partId: 0 2026-01-10T12:20:12.682401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-10T12:20:12.682493Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-10T12:20:12.682558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.682619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:12.682817Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:20:12.683010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:20:12.683089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:12.687340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.687641Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:12.687687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:20:12.687865Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:12.688105Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:12.688152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:20:12.688198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:20:12.688612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 
72057594046678944 2026-01-10T12:20:12.688699Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:20:12.688830Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:20:12.688876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:12.688937Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:20:12.688979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:12.689028Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:20:12.689077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:12.689128Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:20:12.689167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:20:12.689344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:12.689397Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:20:12.689441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:20:12.689479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:20:12.690902Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:12.691002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:12.691041Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:20:12.691094Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:20:12.691150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:20:12.691666Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 
72057594046678944, cookie: 104 2026-01-10T12:20:12.691752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:12.691786Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:20:12.691816Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:20:12.691848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:12.691934Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:20:12.695825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:20:12.695916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:20:12.696091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:20:12.696145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:20:12.696463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:20:12.696547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:20:12.696577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:993:2913] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2026-01-10T12:20:12.699970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:12.700248Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:12.700456Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2026-01-10T12:20:12.702761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:12.703042Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:12.703373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:20:12.703422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:12.703816Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:12.703916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:12.703964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1000:2920] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2026-01-10T12:19:31.247839Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706496311788253:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:31.248610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:31.480376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:31.492250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:31.492350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:31.554489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:31.606305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:31.610002Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706496311788220:2081] 1768047571244561 != 1768047571244564 2026-01-10T12:19:31.685860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:31.781350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:31.788389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:31.864157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.005062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.051734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.519711Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706511522921104:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.519751Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:34.532920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.598337Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:34.601107Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706511522921077:2081] 1768047574518854 != 1768047574518857 2026-01-10T12:19:34.643319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:34.643415Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:34.646907Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:34.698618Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.809122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:19:34.823685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.871403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.913776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.041708Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706525584519391:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.041779Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.054314Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.117948Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:38.119615Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706525584519364:2081] 1768047578040711 != 1768047578040714 2026-01-10T12:19:38.162410Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:38.162492Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:38.167140Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:38.253856Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.359715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:38.379517Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.432659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.481907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:41.763807Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706541701117673:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:41.763916Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:41.793191Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.862313Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:41.864157Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706541701117620:2081] 1768047581760775 != 1768047581760778 2026-01-10T12:19:41.909728Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:41.909844Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:41.916686Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.069465Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.143583Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.152375Z node 4 :FLAT ... 
57594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:53.437098Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:53.601630Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.723810Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:53.729678Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:53.750158Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:53.827550Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:53.887875Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.564629Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706607275331124:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:57.564739Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.589227Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.689938Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:57.708979Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:57.709081Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:57.730490Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:57.885001Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.003423Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.010076Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:58.026654Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.104962Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.179981Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.645083Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706631245407311:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:02.645486Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:02.699112Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.792072Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:02.793876Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706631245407184:2081] 1768047602636483 != 1768047602636486 2026-01-10T12:20:02.806683Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:02.806785Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:02.829063Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:02.906249Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.095960Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.102986Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:03.123285Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.188714Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.268646Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.232972Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706653638056909:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:07.233086Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:07.248653Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.341154Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:07.344105Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706653638056876:2081] 1768047607231699 != 1768047607231702 2026-01-10T12:20:07.394364Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:07.394478Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:07.401318Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:07.457021Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.656050Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:07.678257Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.742879Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.871449Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:09.146476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:09.146565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.146598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:09.146630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:09.146667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:09.146707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:09.146755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.146812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:09.147629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:09.147922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.218708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.218756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.229812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.229916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.230070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.237795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.238069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.238813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.239063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.241583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.241761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.242970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.243029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.243115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.243163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.243211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.243505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.374376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.375931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
X_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2026-01-10T12:20:13.294259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.294312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:13.294508Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:20:13.294682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:20:13.294751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.297780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.298035Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.298068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:20:13.298207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.298386Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.298418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:20:13.298454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:20:13.298773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.298824Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:20:13.298936Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:20:13.298970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:13.299018Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:20:13.299050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:13.299084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:20:13.299126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:20:13.299163Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:20:13.299196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:20:13.299333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.299380Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:20:13.299413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:20:13.299444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:20:13.300551Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:13.300643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:13.300682Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:20:13.300720Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:20:13.300762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:20:13.301167Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:13.301232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:20:13.301259Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:20:13.301284Z 
node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:20:13.301314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.301378Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:20:13.304342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:20:13.304439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:20:13.304633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:20:13.304675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:20:13.305024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:20:13.305107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.305148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:993:2913] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } TestModificationResults wait txId: 105 2026-01-10T12:20:13.308442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:13.308685Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.308905Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), at schemeshard: 72057594046678944 2026-01-10T12:20:13.310889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-10T12:20:13.311140Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:13.311449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:20:13.311492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:13.311835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.311914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.311952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1000:2920] TestWaitNotification: OK eventTxId 105 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock |92.0%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2026-01-10T12:19:31.448394Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706498737145310:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:31.448480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:31.684018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:31.684752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:31.684845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:31.706191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:31.757866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:31.759092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706498737145269:2081] 1768047571446739 != 1768047571446742 2026-01-10T12:19:31.941933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:19:31.979006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:32.010730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.118298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.162941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.424607Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706508457027385:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.424701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:34.435404Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.486645Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:34.491794Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706508457027358:2081] 1768047574423918 != 1768047574423921 2026-01-10T12:19:34.494457Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:34.494517Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:34.516069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:34.669615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.675082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:34.689354Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.730323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.769405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.401080Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706521648220544:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:37.401135Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:37.410390Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:37.492776Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:37.503128Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706521648220520:2081] 1768047577400679 != 1768047577400682 2026-01-10T12:19:37.508867Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.542642Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:37.542737Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:37.547761Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:37.683764Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:37.716962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:37.743727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.798790Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.846363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.898387Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706537011298246:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:40.898448Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:40.916908Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.983587Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:40.985070Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706537011298218:2081] 1768047580897477 != 1768047580897480 2026-01-10T12:19:41.020817Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:41.020912Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:41.026534Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:41.142997Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.258337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:3 ... 
lterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:53.944218Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:19:53.949639Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.032278Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.084636Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.887475Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706609633908437:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:57.887550Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.909016Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.011059Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:58.011143Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:58.017564Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:58.020229Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706609633908412:2081] 1768047597886711 != 1768047597886714 2026-01-10T12:19:58.038823Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:58.128811Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.330467Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.336464Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:58.359889Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.434575Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.515660Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.900256Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706629998610391:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:02.901361Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:02.924195Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.025525Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:03.027452Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706629998610363:2081] 1768047602894022 != 1768047602894025 2026-01-10T12:20:03.029596Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:03.029712Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:03.053436Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:03.126162Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.401504Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.407570Z node 9 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:03.420311Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:03.428734Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.500190Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.554567Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.504382Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706652822009475:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:07.505399Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:07.513168Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:07.606937Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706652822009447:2081] 1768047607498565 != 1768047607498568 2026-01-10T12:20:07.616574Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:07.623488Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.650486Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:07.650614Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:07.666292Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:07.812488Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.959748Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:07.985793Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.058973Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.123534Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::WriteRowInsertFails-isSink-isOlap >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:09.245651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:09.245761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.245802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:09.245834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:09.245895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:09.245926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:09.245982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.246050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:09.246924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:09.247186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.340755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.340821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.355780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.355870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.356068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.366131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.366412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.367242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.367521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.370052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.370250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.371597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.371675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.371766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.371828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.371889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.372164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.518276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:20:09.519477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.519764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-10T12:20:13.422007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:13.423184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:13.423444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:20:13.423492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:13.423903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.423951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-10T12:20:13.423991Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.484346Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:13.484518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:13.484588Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-10T12:20:13.484664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:13.501495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-10T12:20:13.501662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:13.501748Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:13.501806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.501855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:13.502016Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:20:13.502223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.505130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.505887Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.505943Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.506240Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.506289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:13.506705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.506764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:13.506884Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.506927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.506972Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.507015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.507056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:13.507099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.507143Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:13.507183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:13.507334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.507386Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-10T12:20:13.507423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:13.508257Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.508360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.508411Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:13.508454Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:13.508503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.508614Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-10T12:20:13.508661Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:816:2783] 2026-01-10T12:20:13.510728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:20:13.510834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.510875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1028:2947] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-10T12:20:13.514698Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:13.514918Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.515111Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-10T12:20:13.517279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:13.517529Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:20:13.517818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:20:13.517850Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:20:13.518193Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:20:13.518280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.518316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1077:2980] TestWaitNotification: OK eventTxId 106 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> TLocksTest::SetLockNothing [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:09.642225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:09.642321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.642377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:09.642414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:09.642469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:09.642502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:09.642561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.642628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:09.643527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:09.643827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.734020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.734080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.749757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.749839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.750004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.758827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.759068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.759751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.759974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.762227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.762408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.763613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.763672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.763738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.763778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.763817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.764033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.919366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:20:09.920413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.920977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.921524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
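Earlier in this output an AlterPersQueueGroup request with Split { Partition: 1 SplitBoundary: "W" } is rejected with StatusInvalidParameter, reason "Invalid partition status: 2", and the Topic1 description later in this log reports that partition as Inactive after a merge. The following minimal standalone sketch of such a validity check assumes the reported status value 2 corresponds to Inactive and uses simplified single-byte key bounds; it is not the schemeshard's actual implementation.

// Illustrative sketch only (not YDB's implementation): a validity check for a
// topic-partition split request, mirroring the behaviour visible in this log.
// Splitting partition 1 at boundary "W" fails because that partition is already
// Inactive (it was consumed by the earlier merge); the status value 2 reported in
// "Invalid partition status: 2" is assumed here to map to Inactive.
#include <iostream>
#include <optional>
#include <string>

enum class EPartitionStatus { Unknown = 0, Active = 1, Inactive = 2 };

struct TPartition {
    std::optional<std::string> FromBound;  // empty == unbounded from below
    std::optional<std::string> ToBound;    // empty == unbounded from above
    EPartitionStatus Status = EPartitionStatus::Unknown;
};

// Returns an error text if the split is invalid, std::nullopt otherwise.
std::optional<std::string> ValidateSplit(const TPartition& p, const std::string& boundary) {
    if (p.Status != EPartitionStatus::Active) {
        return "Invalid partition status: " + std::to_string(static_cast<int>(p.Status));
    }
    // The split boundary must lie strictly inside the partition's key range.
    if (p.FromBound && boundary <= *p.FromBound) {
        return "Split boundary is not greater than FromBound";
    }
    if (p.ToBound && boundary >= *p.ToBound) {
        return "Split boundary is not less than ToBound";
    }
    return std::nullopt;
}

int main() {
    // Partition 1 with simplified bounds ("?" .. 0x7F) and the Inactive status
    // reported by the DescribeScheme output in this log.
    TPartition partition1{std::string("?"), std::string(1, '\x7f'), EPartitionStatus::Inactive};
    if (auto err = ValidateSplit(partition1, "W")) {
        std::cout << *err << "\n";  // prints: Invalid partition status: 2
    }
    return 0;
}

The status check fires before any key-range comparison, which matches the audit record above: the operation never reaches the propose stage and is rejected directly with StatusInvalidParameter.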
Step: 250 2026-01-10T12:20:13.598930Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:13.599000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.599054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:13.599243Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:20:13.599461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.602527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.603117Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.603178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.603522Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.603582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:13.604002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.604065Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:13.604218Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.604266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.604313Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.604368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.604419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:13.604467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.604518Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:13.604560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:13.604734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.604788Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-10T12:20:13.604829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:13.605923Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.606040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.606092Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:13.606138Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:13.606189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.606289Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-10T12:20:13.606343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:816:2783] 2026-01-10T12:20:13.610142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:20:13.610263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.610316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1034:2948] TestWaitNotification: OK eventTxId 105 2026-01-10T12:20:13.610915Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:20:13.611206Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 317us result status StatusSuccess 2026-01-10T12:20:13.612111Z node 3 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" 
ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TLocksTest::GoodSameShardLock [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2026-01-10T12:19:32.407908Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706502434000006:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:32.408000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:32.606095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:32.610975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:32.611075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:32.615503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:32.679279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:32.683123Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706502433999982:2081] 1768047572407039 != 1768047572407042 2026-01-10T12:19:32.781896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:32.881484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:32.961685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.091235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:33.131434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:35.380465Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706516065406334:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:35.380514Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:35.394447Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:35.453026Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:35.455107Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706516065406307:2081] 1768047575379460 != 1768047575379463 2026-01-10T12:19:35.496752Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:35.496817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:35.501013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:35.585900Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:35.651754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:35.671801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:35.718011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:35.759421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.848015Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706526405750219:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.848440Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.871095Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.954294Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:38.984389Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:38.984485Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:38.990699Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:39.075502Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:39.156007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:39.171622Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.228603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.280938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.389520Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706546509266897:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:42.390169Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:42.440178Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.491834Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:42.521091Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:42.521173Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:42.532135Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.697195Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.767783Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.774724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.790780Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.842275Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemesha ... 
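Each node start-up in the log above reports the same Hive VolatileState sequence, Unknown -> Disconnected -> Connecting -> Connected. The short sketch below merely encodes that observed ordering as a transition check; the enum and table are illustrative only and are not Hive's actual state machine.

// Illustrative sketch only: encodes the node VolatileState ordering observed in the
// log above (Unknown -> Disconnected -> Connecting -> Connected). Not Hive's code.
#include <iostream>
#include <map>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Expected forward transition for each state, as seen in the log entries above.
bool IsExpectedTransition(EVolatileState from, EVolatileState to) {
    static const std::map<EVolatileState, EVolatileState> next = {
        {EVolatileState::Unknown, EVolatileState::Disconnected},
        {EVolatileState::Disconnected, EVolatileState::Connecting},
        {EVolatileState::Connecting, EVolatileState::Connected},
    };
    auto it = next.find(from);
    return it != next.end() && it->second == to;
}

int main() {
    std::cout << std::boolalpha
              << IsExpectedTransition(EVolatileState::Unknown, EVolatileState::Disconnected) << "\n"   // true
              << IsExpectedTransition(EVolatileState::Disconnected, EVolatileState::Connected) << "\n"; // false
    return 0;
}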
cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:54.127171Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:54.158790Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:54.193601Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:54.389316Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:54.397029Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:54.413235Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:19:54.418892Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.486230Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.537865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.363523Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706612348320337:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.363647Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:58.400234Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.499668Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:58.537351Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:58.537447Z node 8 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:58.546377Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:58.678727Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.811067Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.819482Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:58.834328Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.903560Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.957630Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.006053Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706635638288083:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:03.006144Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:03.046965Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.136802Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:03.153574Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:03.153687Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:03.177181Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:03.333252Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.462310Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.485393Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.554366Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.609572Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.045334Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706657840230110:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:08.045404Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:08.073802Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.146513Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:08.155193Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706657840230070:2081] 1768047608044381 != 1768047608044384 2026-01-10T12:20:08.183641Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:08.183766Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:08.202761Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:08.271196Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.480144Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:08.504348Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.579573Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.639848Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:09.022342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:09.022448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.022482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:09.022515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:09.022566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:09.022594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:09.022649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.022713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-10T12:20:09.023555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:09.023859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.115422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.115510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.131553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.131666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.131844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.140723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.140977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.141669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.141901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.144130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.144324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.145602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.145663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.145731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.145776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.145818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.146080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.281501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.282987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.283564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-10T12:20:13.853353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:13.872796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:13.873074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:20:13.873125Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:13.873568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.873622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-10T12:20:13.873665Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.931002Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:13.931151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:13.931205Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-10T12:20:13.931268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:13.948857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-10T12:20:13.949010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:13.949093Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:13.949158Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.949206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:13.949357Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:20:13.949531Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.952267Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.953154Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.953224Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.953552Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.953614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:13.954219Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.954272Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:13.954376Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.954412Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.954451Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.954500Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.954539Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:13.954582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.954624Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:13.954657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:13.954790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.954824Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-10T12:20:13.954857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:13.955640Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.955761Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.955811Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:13.955856Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:13.955896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.956003Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-10T12:20:13.956042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:816:2783] 2026-01-10T12:20:13.959622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:20:13.959727Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.959777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1033:2952] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-10T12:20:13.963731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:13.963989Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.964227Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-10T12:20:13.966659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:13.966975Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:20:13.967344Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:20:13.967400Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:20:13.967873Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:20:13.967972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.968019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1084:2987] TestWaitNotification: OK eventTxId 106 >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:08.959033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:08.959119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.959149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:08.959177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:08.959222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:08.959258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:08.959306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.959371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-10T12:20:08.960248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:08.960560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.032026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.032120Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.047510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.047626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.047816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.055281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.055504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.056141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.056324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.058645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.058839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.060090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.060158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.060267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.060322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.060374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.060664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.183646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.184907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.185921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.186016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.186114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.186189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
chemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.518433Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.518475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.518732Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.518770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:13.519463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.519514Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:13.519635Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.519690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.519737Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.519780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.519826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:13.519877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.519922Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:13.519962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:13.520130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.520186Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-10T12:20:13.520229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:13.521506Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.521596Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 
Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.521643Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:13.521687Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:13.521735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.521831Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:20:13.524879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:13.525147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:20:13.525197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:13.525622Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.525723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.525768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1074:2984] TestWaitNotification: OK eventTxId 105 2026-01-10T12:20:14.098362Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:20:14.098702Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 372us result status StatusSuccess 2026-01-10T12:20:14.099443Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } 
Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2026-01-10T12:20:14.103144Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:20:14.103369Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:20:14.103528Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 2026-01-10T12:20:14.105879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." 
TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:14.106116Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:20:14.106396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:20:14.106435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:20:14.106785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:20:14.106860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:20:14.106891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1089:2998] TestWaitNotification: OK eventTxId 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:08.938454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:08.938568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.938610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:08.938647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:08.938700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:08.938731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:08.938778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:08.938847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:08.939799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:08.940108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.029969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.030045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.045734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.045829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.046000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.053920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.054154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.054815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.055020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.057013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.057171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.058414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.058479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.058556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.058608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.058651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.058926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.190996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.191844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.191934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.191994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:20:09.192476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.192912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:13.862774Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 1250, at tablet: 72057594046678944 2026-01-10T12:20:13.862824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:13.870869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-10T12:20:13.871007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2026-01-10T12:20:13.871092Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2026-01-10T12:20:13.871146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.871189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:13.871350Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:20:13.871542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.874837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.878507Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:13.878567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:13.878798Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:13.878830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:13.879570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:13.879625Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:13.879762Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.879800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.879845Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:13.879876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.879913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:13.879957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:13.880000Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:13.880042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:13.880254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:13.880306Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-10T12:20:13.880342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:13.881469Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.881544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:13.881592Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:13.881637Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:13.881677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:13.881770Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:20:13.884902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:20:13.885130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 
2026-01-10T12:20:13.885189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:20:13.885552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:20:13.885650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:13.885697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1083:2991] TestWaitNotification: OK eventTxId 105 2026-01-10T12:20:14.316858Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:20:14.317209Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 377us result status StatusSuccess 2026-01-10T12:20:14.317920Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 
BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:20:09.214626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:20:09.214732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.214771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:20:09.214804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:20:09.214845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:20:09.214876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:20:09.214930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:20:09.214995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:20:09.215971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:20:09.216287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:20:09.285898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:20:09.285942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.296998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:20:09.297089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:20:09.297235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:20:09.303784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:20:09.303972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:20:09.304491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:20:09.304663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:20:09.306436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.306610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:20:09.307631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:09.307688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:09.307739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:20:09.307768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:20:09.307802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:20:09.308009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:20:09.415100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:20:09.415719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.415800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.415852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.415910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.415966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:20:09.416443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1-10T12:20:14.379821Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:20:14.379889Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-10T12:20:14.379953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:20:14.395630Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-10T12:20:14.395867Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:14.395973Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-10T12:20:14.396060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:20:14.396131Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:20:14.396313Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:20:14.396534Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:14.399946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:14.400491Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:20:14.400549Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:20:14.400878Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:20:14.400937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:20:14.401901Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:20:14.401989Z node 4 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:20:14.402118Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:14.402163Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:14.402209Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:20:14.402250Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:14.402294Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:20:14.402355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:20:14.402400Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:20:14.402434Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:20:14.402583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:20:14.402632Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-10T12:20:14.402680Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:20:14.404230Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:14.404349Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:20:14.404398Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:20:14.404462Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:20:14.404510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:20:14.404622Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-10T12:20:14.404666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:816:2783] 2026-01-10T12:20:14.407912Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:20:14.408019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:20:14.408065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1033:2952] TestWaitNotification: OK eventTxId 105 2026-01-10T12:20:14.408726Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:20:14.408999Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 306us result status StatusSuccess 2026-01-10T12:20:14.409742Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 4 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 4 NextPartitionId: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan >> KqpCost::IndexLookup+useSink >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts |92.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2026-01-10T12:19:48.679028Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706571271159961:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:48.679116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:48.878322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:48.878411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:48.880446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:48.917401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:48.965100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:48.966287Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706571271159931:2081] 1768047588677095 != 1768047588677098 2026-01-10T12:19:49.099963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:49.151242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:49.234415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:49.351603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:49.394233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.762839Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706583653016807:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:51.762881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:51.778766Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.851976Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706583653016780:2081] 1768047591761780 != 1768047591761783 2026-01-10T12:19:51.859710Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:51.899814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:51.899895Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:51.906518Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:52.031511Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:52.086913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:52.107174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:52.156599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:52.196793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.123640Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706598928043197:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:55.123708Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:55.137712Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.205947Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:55.207124Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706598928043170:2081] 1768047595122675 != 1768047595122678 2026-01-10T12:19:55.245474Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:55.245559Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:55.252138Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:55.363636Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.439698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:55.448269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:55.461974Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.523279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.566328Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.848178Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706614043373470:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.849265Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:58.892155Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.995073Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:59.007734Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706614043373442:2081] 1768047598842503 != 1768047598842506 2026-01-10T12:19:59.036363Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:59.036482Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:59.055928Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:59.106834Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.255725Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:59.277461Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.351689Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.427579Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.480577Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593706629094480250:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:02.481812Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:02.501300Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.579238Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:02.579536Z 
node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593706629094480147:2081] 1768047602477154 != 1768047602477157 2026-01-10T12:20:02.622920Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:02.623008Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:02.632006Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:02.745860Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.857288Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:02.869130Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:02.892168Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.963145Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.027664Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.162992Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593706649717373996:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:06.163031Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:06.174278Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.244136Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:06.245500Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593706649717373963:2081] 
1768047606162204 != 1768047606162207 2026-01-10T12:20:06.275535Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:06.275605Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:06.277310Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:06.360060Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.467150Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:06.489211Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.560707Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.612433Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:10.561716Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593706663663907558:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:10.561765Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:10.577181Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:10.640286Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:10.642685Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593706663663907531:2081] 1768047610560894 != 1768047610560897 2026-01-10T12:20:10.675036Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:10.675123Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:20:10.679847Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:10.750321Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:10.928355Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:10.944964Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:10.999057Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.050209Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpCost::QuerySeviceRangeFullScan >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpCost::VectorIndexLookup-useSink |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2026-01-10T12:19:33.951460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706506765012992:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:33.951571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:34.131687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.154660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:34.154731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:34.190813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:34.223831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:34.226724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706506765012964:2081] 1768047573949996 != 1768047573949999 2026-01-10T12:19:34.358984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:34.378908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:34.409597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.516235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:34.555800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:36.654023Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706518418111311:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:36.654083Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:36.670428Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.734587Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:36.735880Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706518418111287:2081] 1768047576653070 != 1768047576653073 2026-01-10T12:19:36.770913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:36.770986Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:36.774116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:36.838390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.971508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:36.985334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.032008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:37.073397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.613074Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706531375612990:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:39.613142Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:39.622072Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:39.707828Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:39.709409Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:39.710260Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706531375612965:2081] 1768047579612689 != 1768047579612692 2026-01-10T12:19:39.745451Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:39.745537Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:39.751218Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:39.906083Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:39.941540Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:39.963127Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.024142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:40.082553Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:43.287362Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706548457734294:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:43.287421Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:19:43.300144Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:43.360478Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:43.396278Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:43.396364Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:43.399829Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:43.475507Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:43.563108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:43.580176Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... -01-10T12:19:55.419247Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:55.422713Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593706598303289979:2081] 1768047595305233 != 1768047595305236 2026-01-10T12:19:55.451790Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:55.451881Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:55.461267Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:55.519173Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.694589Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:55.717611Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.787272Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.842441Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.612030Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706617108088424:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:59.612140Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:59.646244Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.742596Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706617108088397:2081] 1768047599609145 != 1768047599609148 2026-01-10T12:19:59.758670Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:59.773777Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:59.773889Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:59.779459Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:59.921276Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.007460Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:00.025371Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.092052Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.141881Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.781317Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706637885061971:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:04.781390Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:04.797864Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.888638Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:04.923963Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.924088Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.929694Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:05.066923Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:05.161046Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:05.176545Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.231136Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.281441Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:09.749943Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706658511990677:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:09.750045Z node 10 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:09.766401Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:09.857102Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.867163Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706658511990649:2081] 1768047609748976 != 1768047609748979 2026-01-10T12:20:09.901445Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:09.901562Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:09.905191Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:09.994903Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:10.179382Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:10.203547Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:10.266890Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:10.317900Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> KqpCost::WriteRow+isSink-isOlap |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink >> KqpCost::PointLookup |92.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2026-01-10T12:19:35.587113Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706514143527816:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:35.587196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:35.773347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:35.792465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:35.792566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:35.847847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:35.850681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:35.850983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706514143527787:2081] 1768047575585469 != 1768047575585472 2026-01-10T12:19:35.995179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:36.030906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:36.119585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:36.244597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:36.285837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:38.634200Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706528511321329:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.634252Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.649216Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.717792Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:38.719219Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706528511321302:2081] 1768047578633176 != 1768047578633179 2026-01-10T12:19:38.754996Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:38.755068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:38.758886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:38.873794Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.938084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:38.954181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.010500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.053155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.020604Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706543856988068:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:42.023674Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:42.063135Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.151384Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:42.156474Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706543856988035:2081] 1768047582018370 != 1768047582018373 2026-01-10T12:19:42.180744Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:42.180858Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:42.196744Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.252415Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.404127Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.409343Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.427877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.499782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:42.555297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:45.833719Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706556810439013:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:45.833798Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:45.845987Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:45.932561Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:45.947758Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:45.947845Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:45.968564Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:46.075966Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:46.154687Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:46.160211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:46.170180Z node 4 :FLAT_TX_SCHEMESHAR ... s: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.727466Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:57.734546Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:57.745803Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:19:57.751054Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.819486Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.874784Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.877129Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706627723907476:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:01.878195Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:01.890948Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.995353Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:02.014552Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:02.014657Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:02.039231Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:02.189465Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.272123Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:02.290534Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:20:02.298946Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.361465Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.426029Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.420857Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706646983055516:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:06.420924Z node 9 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:06.441671Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.530790Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:06.545171Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706646983055489:2081] 1768047606420066 != 1768047606420069 2026-01-10T12:20:06.547555Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:06.547666Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:06.580975Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:06.664747Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.829884Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:06.858313Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.924972Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.981002Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.204306Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706670608277776:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:11.204387Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:11.231868Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.313741Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:11.315456Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706670608277747:2081] 1768047611203056 != 1768047611203059 2026-01-10T12:20:11.329241Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:11.329350Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:11.357163Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:11.402833Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.599237Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:11.605247Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:11.612150Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:11.617783Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.685624Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.745168Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpCost::CTAS-isOlap >> KqpCost::WriteRow+isSink+isOlap [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] 
>> TObjectStorageListingTest::SchemaChecks >> KqpCost::IndexLookup-useSink >> KqpCost::CTASWithRetry+isOlap >> KqpCost::Range >> KqpCost::OlapWriteRow >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> KqpCost::WriteRow-isSink-isOlap [GOOD] >> KqpCost::OlapPointLookup [GOOD] >> KqpCost::IndexLookupAndTake+useSink >> KqpCost::IndexLookupJoin+StreamLookupJoin >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1465, MsgBus: 11590 2026-01-10T12:20:11.574986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706667621529637:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:11.575058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:11.614900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:11.876096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:11.876181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:11.905665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:11.935283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:11.938015Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706667621529612:2081] 1768047611573573 != 1768047611573576 2026-01-10T12:20:11.959670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:12.085113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:12.085610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:12.085649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:12.086149Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:12.159567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:12.548803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:12.586342Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:12.606793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.745061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.878305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.942000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.734723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706680506433372:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.734837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.736207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706680506433382:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.736257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.236225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.271144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.302043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.334774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.366414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.402320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.438230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.486290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.590294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706684801401550:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706684801401555:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706684801401557:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.598856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:15.611062Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706684801401559:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:15.672680Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706684801401612:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:16.575312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706667621529637:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.575384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:17.658012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 450 cpu_time_us: 170 } query_phases { duration_us: 6849 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 195 affected_shards: 1 } compilation { duration_us: 89147 cpu_time_us: 81871 } process_cpu_time_us: 967 total_duration_us: 99572 total_cpu_time_us: 83203 query_phases { duration_us: 457 cpu_time_us: 184 } query_phases { duration_us: 5126 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 259 affected_shards: 1 } compilation { duration_us: 92990 cpu_time_us: 86048 } process_cpu_time_us: 1059 total_duration_us: 101481 total_cpu_time_us: 87550 2026-01-10T12:20:18.531967Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706697686303942:2561], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxka2c1nxadr4czt1cnzvd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTljNmJkYTItZmE5MTYzZTktZmJlODk1MC04NTg4MWI1Mg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:20:18.532949Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706697686303943:2562], TxId: 281474976710678, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxka2c1nxadr4czt1cnzvd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NTljNmJkYTItZmE5MTYzZTktZmJlODk1MC04NTg4MWI1Mg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593706697686303939:2522], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:18.533437Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTljNmJkYTItZmE5MTYzZTktZmJlODk1MC04NTg4MWI1Mg==, ActorId: [1:7593706693391336466:2522], ActorState: ExecuteState, LegacyTraceId: 01kekxka2c1nxadr4czt1cnzvd, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 710 cpu_time_us: 298 } query_phases { duration_us: 13490 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 4968 affected_shards: 1 } query_phases { duration_us: 19200 cpu_time_us: 1081 } compilation { duration_us: 366474 cpu_time_us: 357877 } process_cpu_time_us: 1822 total_duration_us: 408332 total_cpu_time_us: 366046 query_phases { duration_us: 734 cpu_time_us: 313 } query_phases { duration_us: 5343 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 484 affected_shards: 1 } query_phases { duration_us: 1533 cpu_time_us: 236 } query_phases { duration_us: 3159 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 692 affected_shards: 1 } compilation { duration_us: 256749 cpu_time_us: 250231 } process_cpu_time_us: 1783 total_duration_us: 272368 total_cpu_time_us: 253739 query_phases { duration_us: 612 cpu_time_us: 245 } query_phases { duration_us: 4970 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 2193 affected_shards: 1 } query_phases { duration_us: 1091 } compilation { duration_us: 241299 cpu_time_us: 230551 } process_cpu_time_us: 1440 total_duration_us: 251403 total_cpu_time_us: 234429 query_phases { duration_us: 623 cpu_time_us: 278 } query_phases { duration_us: 2916 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 533 affected_shards: 1 } query_phases { duration_us: 2990 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 493 affected_shards: 1 } compilation { duration_us: 178839 cpu_time_us: 173091 } process_cpu_time_us: 1426 total_duration_us: 189540 total_cpu_time_us: 175821 query_phases { duration_us: 490 cpu_time_us: 204 } query_phases { duration_us: 2957 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 542 affected_shards: 1 } compilation { duration_us: 72672 cpu_time_us: 67966 } process_cpu_time_us: 1023 total_duration_us: 77601 total_cpu_time_us: 69735 query_phases { duration_us: 370 cpu_time_us: 168 } query_phases { duration_us: 3751 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 194 affected_shards: 1 } compilation { duration_us: 75035 cpu_time_us: 68637 } process_cpu_time_us: 899 total_duration_us: 81385 total_cpu_time_us: 69898 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3256, MsgBus: 22694 2026-01-10T12:20:11.579127Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706669229720444:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:11.579195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:11.873599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:11.873728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:11.921249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:11.936826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:11.943728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:11.946080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706669229720412:2081] 1768047611577657 != 1768047611577660 2026-01-10T12:20:12.085111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:12.085813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:12.085821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:12.085960Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:12.187384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:12.553146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:12.560730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:12.591623Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:12.613809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.741464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.885218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.949015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.807171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706682114624178:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.807365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.807937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706682114624188:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:14.808040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.236234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.271144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.303148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.335353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.366525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.400697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.441108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.510799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.591184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706686409592353:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706686409592358:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.591543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.592003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706686409592360:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:15.598683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:15.611669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706686409592362:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:15.679460Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706686409592413:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:16.580344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706669229720444:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.580434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:17.684566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 86224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-10T12:20:18.077705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-10T12:20:18.079006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7593706694999527403:2540];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976710673;this=136770712336832;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618078;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.080614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7593706694999527384:2534];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976710673;this=136770714179680;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618079;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.083415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593706694999527385:2535];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976710673;this=136770714177888;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618083;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.084024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7593706694999527395:2538];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976710673;this=136770714177216;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618083;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.085026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037927;self_id=[1:7593706694999527410:2541];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=136770714176544;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618084;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.085757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7593706694999527388:2537];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=136770714175872;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618085;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.086990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7593706694999527383:2533];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976710673;this=136770713127776;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618086;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.087949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7593706694999527396:2539];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710673;this=136770713129344;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047618087;max=18446744073709551615;plan=0;src=[1:7593706669229720763:2146];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.097186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.097317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.097343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.099875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.099931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.099948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.143869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.143941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.143957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.145539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.145591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.145606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.167978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.168547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.168607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.168624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.176267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.176299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.181292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.181357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.181374Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.188427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.188503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.188519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.195206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.195271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.195289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.197418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.197485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:18.197501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] >> TLocksTest::Range_GoodLock1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 17455, MsgBus: 17179 2026-01-10T12:20:14.084988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706682309429090:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:14.085095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:14.340031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:14.345947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:14.346080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:14.408914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:14.434192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:14.456004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706682309429061:2081] 1768047614083491 != 1768047614083494 2026-01-10T12:20:14.519703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:14.519760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:14.519773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:14.519919Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:14.556111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:14.939471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:14.946269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:15.011451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.097248Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:15.146825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.296660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.371946Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.482253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706695194332831:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.482389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.482833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706695194332841:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.482898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.852610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.894688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.938392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.974735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.010046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.077470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.135198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.193700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.268627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699489301007:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.268707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.269079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699489301012:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.269141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699489301013:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.269177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.273175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:18.288271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706699489301016:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:18.362147Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706699489301067:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:19.085166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706682309429090:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.085260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:20.435311Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047620451, txId: 281474976715673] shutting down |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpCost::AAARangeFullScan [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2391, MsgBus: 15154 2026-01-10T12:20:13.836410Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706677820517170:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:13.836785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:14.101405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:14.108288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:14.108407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:14.136030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:14.195390Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:14.200186Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706677820517141:2081] 1768047613833034 != 1768047613833037 2026-01-10T12:20:14.258184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:14.258212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:14.258220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:14.258332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:20:14.365552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:14.687146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:14.741056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.846634Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:14.875069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.046197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.109212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.195031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706695000388200:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.195206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.195600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706695000388209:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.195641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.569102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.610621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.641717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.721543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.775879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.851367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.893384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.939091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.019235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699295356384:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.019319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.019519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699295356390:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.019560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699295356389:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.019592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.023446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:18.034425Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706699295356393:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:18.105995Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706699295356444:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:18.835161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706677820517170:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:18.835261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:19.899721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.937439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.972917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] >> KqpCost::ScanScriptingRangeFullScan-SourceRead >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> KqpCost::OlapRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:82:2113] Leader for TabletID 72057594037927937 is [45:82:2113] sender: [45:198:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:78:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:81:2057] recipient: [46:80:2112] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:83:2057] recipient: [46:80:2112] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:82:2113] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:198:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:79:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:82:2057] recipient: [47:81:2112] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:84:2057] recipient: [47:81:2112] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:83:2113] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:199:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:52:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:82:2057] recipient: [48:39:2086] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:85:2057] recipient: [48:84:2115] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:87:2057] recipient: [48:84:2115] !Reboot 72057594037927937 (actor [48:58:2099]) rebooted! !Reboot 72057594037927937 (actor [48:58:2099]) tablet resolver refreshed! new actor is[48:86:2116] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:202:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:59:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:76:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:82:2057] recipient: [49:39:2086] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:85:2057] recipient: [49:84:2115] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:87:2057] recipient: [49:84:2115] !Reboot 72057594037927937 (actor [49:58:2099]) rebooted! !Reboot 72057594037927937 (actor [49:58:2099]) tablet resolver refreshed! new actor is[49:86:2116] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:202:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:54:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:59:2057] recipient: [50:54:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:76:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:83:2057] recipient: [50:39:2086] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:86:2057] recipient: [50:85:2115] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:88:2057] recipient: [50:85:2115] !Reboot 72057594037927937 (actor [50:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [50:58:2099]) tablet resolver refreshed! new actor is[50:87:2116] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:203:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:59:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:76:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:86:2057] recipient: [51:39:2086] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:89:2057] recipient: [51:88:2118] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:91:2057] recipient: [51:88:2118] !Reboot 72057594037927937 (actor [51:58:2099]) rebooted! !Reboot 72057594037927937 (actor [51:58:2099]) tablet resolver refreshed! new actor is[51:90:2119] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:206:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:59:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:76:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:86:2057] recipient: [52:39:2086] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:89:2057] recipient: [52:88:2118] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:91:2057] recipient: [52:88:2118] !Reboot 72057594037927937 (actor [52:58:2099]) rebooted! !Reboot 72057594037927937 (actor [52:58:2099]) tablet resolver refreshed! new actor is[52:90:2119] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:206:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:52:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:59:2057] recipient: [53:52:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:76:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:87:2057] recipient: [53:39:2086] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:90:2057] recipient: [53:89:2118] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:92:2057] recipient: [53:89:2118] !Reboot 72057594037927937 (actor [53:58:2099]) rebooted! !Reboot 72057594037927937 (actor [53:58:2099]) tablet resolver refreshed! 
new actor is[53:91:2119] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:207:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:59:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:76:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:90:2057] recipient: [54:39:2086] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:93:2057] recipient: [54:92:2121] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:95:2057] recipient: [54:92:2121] !Reboot 72057594037927937 (actor [54:58:2099]) rebooted! !Reboot 72057594037927937 (actor [54:58:2099]) tablet resolver refreshed! new actor is[54:94:2122] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:210:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:59:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:76:2057] recipient: [55:14:2061] !Reboot 72057594037927937 (actor [55:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:90:2057] recipient: [55:39:2086] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:93:2057] recipient: [55:92:2121] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:95:2057] recipient: [55:92:2121] !Reboot 72057594037927937 (actor [55:58:2099]) rebooted! !Reboot 72057594037927937 (actor [55:58:2099]) tablet resolver refreshed! new actor is[55:94:2122] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:210:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:54:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:59:2057] recipient: [56:54:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:76:2057] recipient: [56:14:2061] !Reboot 72057594037927937 (actor [56:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:91:2057] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:94:2057] recipient: [56:93:2121] Leader for TabletID 72057594037927937 is [56:95:2122] sender: [56:96:2057] recipient: [56:93:2121] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! 
new actor is[56:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpCost::IndexLookup+useSink [GOOD] >> KqpCost::PointLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2026-01-10T12:19:40.532105Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706535136342330:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:40.532676Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:40.741267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:40.741415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:40.744572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:40.801470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:40.820142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706535136342302:2081] 1768047580530420 != 1768047580530423 2026-01-10T12:19:40.826215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:41.011719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:41.029730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:19:41.064217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.129390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:41.256340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:41.307407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:43.962092Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706549891402083:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:43.963982Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:43.974602Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.034677Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:44.085138Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:44.085219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:44.087373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:44.217594Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.264481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:44.283040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.354308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.399536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:19:47.589601Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706565216311071:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.589666Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:47.607077Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.691654Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:47.707130Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:47.707219Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:47.731031Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:47.775589Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.902921Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.922111Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:47.980829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.041956Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.179402Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706584274412500:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:51.181639Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:51.212743Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.286415Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:51.295039Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706584274412473:2081] 1768047591177011 != 1768047591177014 2026-01-10T12:19:51.322049Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:51.322206Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:51.332287Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:51.449572Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.521364Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:51.541010Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.598009Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemesha ... 
09Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.655029Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.662141Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:03.682036Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.751167Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.801033Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.896067Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706650684567748:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:07.896153Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:07.922090Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.019765Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:08.021230Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706650684567719:2081] 1768047607894747 != 1768047607894750 2026-01-10T12:20:08.062566Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:08.062671Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:08.069006Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:08.177789Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.300239Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:08.322590Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.396760Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.455787Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:12.511770Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706675354654190:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:12.511836Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:12.532934Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:12.629978Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:12.630094Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:12.632211Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:12.633830Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706675354654157:2081] 1768047612510776 != 1768047612510779 2026-01-10T12:20:12.674756Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:12.777422Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:12.941267Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:12.965575Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:13.044974Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:13.107919Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.350050Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706696455074392:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:17.350145Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:17.373120Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:17.472364Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:17.473926Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706696455074353:2081] 1768047617349620 != 1768047617349623 2026-01-10T12:20:17.488926Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:17.489035Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:17.521156Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:17.568175Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:17.877816Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.885022Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-10T12:20:17.897421Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:17.902928Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.987345Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.054141Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:19:08.576726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:19:08.576811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:08.576844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:19:08.576878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:19:08.576909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:19:08.576936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:19:08.577007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:08.577076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-10T12:19:08.577727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:19:08.577973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:19:08.639679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:19:08.639738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:08.650636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:19:08.650729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:19:08.650953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:19:08.658360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:19:08.658563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:19:08.659179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:08.659355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:19:08.661132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:08.661325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:19:08.662336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:19:08.662381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:08.662440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:19:08.662492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:19:08.662534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:19:08.662727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:19:08.770453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.771900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-10T12:20:23.248815Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:728:2709]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:23.248977Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-10T12:20:23.249373Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:728:2709], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 31 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 131 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-10T12:20:23.249430Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:23.249482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0031 2026-01-10T12:20:23.249604Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:23.249653Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:20:23.260121Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:729:2710]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:23.260219Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:23.260313Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 outdated step 5000038 last cleanup 0 2026-01-10T12:20:23.260377Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 
2026-01-10T12:20:23.260410Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2026-01-10T12:20:23.260460Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2026-01-10T12:20:23.260495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2026-01-10T12:20:23.260648Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:729:2710]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:23.260781Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 38 2026-01-10T12:20:23.261132Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:729:2710], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 131 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-10T12:20:23.261178Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:23.261226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0027 2026-01-10T12:20:23.261336Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:23.313396Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:23.313474Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:23.313528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-10T12:20:23.313617Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, 
queue# 2 2026-01-10T12:20:23.313653Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-10T12:20:23.313772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-10T12:20:23.313831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-10T12:20:23.313863Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-10T12:20:23.313938Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2026-01-10T12:20:23.314003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 2 out of 2 partitions 2026-01-10T12:20:23.314052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-10T12:20:23.314081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:20:23.314104Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-10T12:20:23.314141Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-10T12:20:23.314196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409547 by load, its table already has 2 out of 2 partitions 2026-01-10T12:20:23.314259Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:20:23.324756Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:23.324848Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:23.324881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:20:23.347841Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1735:3654], Recipient [3:728:2709]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:23.347926Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:23.347972Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409546, clientId# [3:1734:3653], serverId# [3:1735:3654], sessionId# [0:0:0] 2026-01-10T12:20:23.348311Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1733:3652], Recipient [3:728:2709]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } 2026-01-10T12:20:23.350823Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1738:3657], Recipient [3:729:2710]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:23.350894Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:23.350935Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409547, clientId# [3:1737:3656], serverId# [3:1738:3657], sessionId# [0:0:0] 2026-01-10T12:20:23.351073Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1736:3655], Recipient [3:729:2710]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 3729, MsgBus: 1816 2026-01-10T12:20:16.196335Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706688563569518:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.196654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:16.432200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:16.448953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:16.449063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:16.499598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:16.542008Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:16.545352Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706688563569413:2081] 1768047616187529 != 1768047616187532 2026-01-10T12:20:16.603728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:16.621093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:16.621121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:16.621136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:16.621251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:17.075714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.083168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:17.135663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.217724Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:17.290453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.463156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.533040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.505104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706701448473170:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.505214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.505528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706701448473180:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.505583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.939237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.019284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.064573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.102798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.169654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.207116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.240606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.283650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.362057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705743441349:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.362138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.362353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705743441355:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.362387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705743441354:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.362402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.366078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:20.376911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706705743441358:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:20.450500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706705743441409:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:21.195369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706688563569518:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.195449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:22.274027Z node 1 :KQP_GATEWAY DEBUG: kqp_metadata_loader.cpp:922: Load table metadata from cache by path, request Path: /Root/Test 2026-01-10T12:20:22.4 ... 74976715674, task: 1, CA Id [1:7593706714333376306:2530]. Recv TEvReadResult from ShardID=72075186224037914, ReadId=0, Status=SUCCESS, Finished=1, RowCount=3, TxLocks= , BrokenTxLocks= 2026-01-10T12:20:22.454172Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. Taken 0 locks 2026-01-10T12:20:22.454183Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:20:22.454196Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593706714333376306:2530], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 276037645 2026-01-10T12:20:22.454208Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593706714333376306:2530], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646922 2026-01-10T12:20:22.454218Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:20:22.454235Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. enter pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:20:22.454274Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. exit pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 3 freeSpace: 8388548 2026-01-10T12:20:22.454295Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. returned 3 rows; processed 3 rows 2026-01-10T12:20:22.454352Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. 
dropping batch for read #0 2026-01-10T12:20:22.454365Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. effective maxinflight 1 sorted 1 2026-01-10T12:20:22.454365Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593706714333376307:2531], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646926 2026-01-10T12:20:22.454377Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:20:22.454390Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715674, task: 1, CA Id [1:7593706714333376306:2530]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:20:22.454506Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [1:7593706714333376307:2531], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593706714333376306 RawX2: 4503603922340322 } } DstEndpoint { ActorId { RawX1: 7593706714333376307 RawX2: 4503603922340323 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593706714333376307 RawX2: 4503603922340323 } } DstEndpoint { ActorId { RawX1: 7593706714333376302 RawX2: 4503603922340314 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:20:22.454534Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [1:7593706714333376307:2531], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Update input channelId: 1, peer: [1:7593706714333376306:2530] 2026-01-10T12:20:22.454554Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593706714333376306:2530], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:20:22.454666Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593706714333376307:2531], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxke27e7xyg54zncwz5vkp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:20:22.454758Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:20:22.454810Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:20:22.454899Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:20:22.454944Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:20:22.455149Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047622 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-10T12:20:22.455168Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7593706714333376306:2530] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 8305 Tasks { TaskId: 1 CpuTimeUs: 1132 FinishTimeMs: 1768047622454 OutputRows: 1 OutputBytes: 20 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ResultRows: 1 ResultBytes: 20 ComputeCpuTimeUs: 208 BuildCpuTimeUs: 924 HostName: "ghrun-tjszgfszoq" NodeId: 1 StartTimeMs: 1768047622454 CreateTimeMs: 1768047622445 UpdateTimeMs: 1768047622454 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:20:22.455226Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7593706714333376306:2530] 2026-01-10T12:20:22.455257Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7593706714333376307:2531], trace_id# 2026-01-10T12:20:22.456884Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7593706714333376265:2522] SeqNo# 1 Rows# 1 trace_id# 2026-01-10T12:20:22.457029Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7593706714333376307:2531] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1278 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 791 FinishTimeMs: 1768047622454 InputRows: 1 InputBytes: 20 ComputeCpuTimeUs: 103 BuildCpuTimeUs: 688 HostName: "ghrun-tjszgfszoq" NodeId: 1 CreateTimeMs: 1768047622453 UpdateTimeMs: 1768047622454 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:20:22.457074Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7593706714333376307:2531] 2026-01-10T12:20:22.457156Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:20:22.457234Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [1:7593706714333376302:2522] TxId: 281474976715674. Ctx: { TraceId: 01kekxke27e7xyg54zncwz5vkp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjMmQzYTEtMzllYjZjMi02NGI3YzkwNC0zM2IyOTVlMg==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.009583s ReadRows: 1 ReadBytes: 20 RequestUnits# 6 ForceFlag# true trace_id# 2026-01-10T12:20:22.461343Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047622488, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 13171, MsgBus: 21680 2026-01-10T12:20:16.536309Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706688649850164:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.536765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:16.803341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:16.803509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:16.834168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:16.850864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:16.854787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:16.976731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:16.976756Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:16.976764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:16.976833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:17.116944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:17.421944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.440771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:17.525853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.548256Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:17.688334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.872232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.952863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.917902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706701534753790:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.918053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.919119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706701534753800:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:19.919176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.235856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.271122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.302219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.336231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.371082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.413653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.457362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.548165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.657144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705829721969:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.657294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.657489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705829721974:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.657604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705829721976:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.657677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.661590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:20.676195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706705829721978:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:20.726426Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706705829722029:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:21.530064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706688649850164:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.562475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Reverse":false,"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Test","ReadColumns":["Group (-∞, +∞)","Name (-∞, ... :1}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":229631,"CpuTimeUs":224149},"ProcessCpuTimeUs":318,"TotalDurationUs":243328,"ResourcePoolId":"default","QueuedTimeUs":383},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Size":"0","Reverse":false,"Name":"TableFullScan","E-Rows":"0","Table":"Test","ReadColumns":["Group (-∞, +∞)","Name (-∞, +∞)","Amount","Comment"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Operators":[{"A-SelfCpu":0.978,"A-Cpu":0.978,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"A-SelfCpu":0.673,"A-Cpu":1.651,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 6352 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1651 affected_shards: 1 } compilation { duration_us: 229631 cpu_time_us: 224149 } process_cpu_time_us: 318 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}},{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[3,20]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1238,\"Max\":1238,\"Min\":1238,\"History\":[3,1238]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"Introspections\":[\"1 tasks default for source scan\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":52,\"Max\":52,\"Min\":52,\"History\":[3,52]},\"Tasks\":1,\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1768047622663,\"CpuTimeUs\":{\"Count\":1,\"Sum\":978,\"Max\":978,\"Min\":978,\"History\":[3,978]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1845,\"Max\":1845,\"Min\":1845,\"History\":[3,1845]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":3}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[4,20]}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[4,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"InputBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"Tasks\":1,\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":1000,\"BaseTimeMs\":1768047622663,\"CpuTimeUs\":{\"Count\":1,\"Sum\":673,\"Max\":673,\"Min\":673,\"History\":[3,673]},\"UpdateTimeMs\":3,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[3,20]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20,\"History\":[3,20]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1238,\"Max\":1238,\"Min\":1238,\"History\":[3,1238]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":229631,\"CpuTimeUs\":224149},\"ProcessCpuTimeUs\":318,\"TotalDurationUs\":243328,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":383},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-SelfCpu\":0.978,\"A-Cpu\":0.978,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-SelfCpu\":0.673,\"A-Cpu\":1.651,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:45\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'560) \'(\'\"_id\" \'\"802f19b4-5a204ccb-ca384a05-58d6699b\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 
(Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'573) \'(\'\"_id\" \'\"d63798ff-e8686d05-15c9f03d-b59fb7d1\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 243328 total_cpu_time_us: 226118 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":45},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1768047622\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"dd625e01-409927d2-88094569-bcbc9d58\",\"version\":\"1.0\"}" |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::WriteRow+isSink-isOlap [GOOD] >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 8276, MsgBus: 9150 2026-01-10T12:20:16.961739Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706689628069829:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.962025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:17.158626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:17.165573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:17.165691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:17.235098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:17.277928Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706689628069695:2081] 1768047616949366 != 1768047616949369 2026-01-10T12:20:17.287080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:17.376834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:17.376863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:17.376871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:17.376938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:17.424677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:17.886476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.904828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:17.968488Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:17.977870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.157709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.332788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.408452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.288489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706706807940757:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.288594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.289006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706706807940767:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.289060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.622170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.659599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.696186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.726183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.768453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.811104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.849898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.904862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.997388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706706807941647:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.997475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.997800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706706807941652:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.997836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706706807941653:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.998099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.001832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:21.017274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706706807941656:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:21.092725Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706711102909003:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:21.961138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706689628069829:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.962643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> KqpCost::CTAS-isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 25799, MsgBus: 15037 2026-01-10T12:20:16.729032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706690879199881:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:16.730142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:16.990635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:17.041290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:17.041375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:17.082976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:17.113923Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:17.116626Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706690879199860:2081] 1768047616727358 != 1768047616727361 2026-01-10T12:20:17.180733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:17.180764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:17.180770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:17.180842Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:17.202637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:17.665634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.671334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:17.748441Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:17.756266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.927832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.116522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.187980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.164771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706708059070930:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.164890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.165332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706708059070940:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.165371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.505130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.549837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.591095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.632558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.669473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.710072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.753056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.814046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.899002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706708059071814:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.899087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.899333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706708059071820:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.899338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706708059071819:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.899396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.902824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:20.915431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706708059071823:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:21.019082Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706708059071874:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:21.730077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706690879199881:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.730184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:22.657034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: 2026-01-10T12:18:25.538560Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:25.627782Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:25.627873Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:25.627947Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.628018Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:25.651782Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:25.677176Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: 
"rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "aaa" Generation: 1 Important: true } 2026-01-10T12:18:25.678419Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:25.681367Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:25.685314Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:25.687395Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:25.699830Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:25.700393Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8fbf40e9-32fb2550-a37ab8d7-6b91d13e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-10T12:18:26.203882Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-10T12:18:26.254349Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:26.254440Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:26.254494Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.254559Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:185:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:188:2057] recipient: [2:187:2197] Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:190:2057] recipient: [2:187:2197] 2026-01-10T12:18:26.312562Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:26.312632Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:26.312741Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.312786Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! new actor is[2:189:2198] 2026-01-10T12:18:26.334617Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.402610Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.434143Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.444821Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.477424Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.530725Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.573365Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.687011Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.718458Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.003091Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.052281Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.381350Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.655322Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:27.729626Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:270:2057] recipient: [2:14:2061] 2026-01-10T12:18:27.931348Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:27.932158Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "aaa" Generation: 2 Important: true } 2026-01-10T12:18:27.933109Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:276:2198] 2026-01-10T12:18:27.935048Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:276:2198] 2026-01-10T12:18:27.937448Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:277:2198] 2026-01-10T12:18:27.938723Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:277:2198] 2026-01-10T12:18:27.951326Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:27.952609Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|52c780d2-3ddc8f7e-88265512-ce5ed750_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:181:2194] 2026-01-10T12:18:28.429486Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] Leader for TabletID 72057594037927937 is [3:112:2142] sender: [3:113:2057] recipient: [3:106:2138] 2026-01-10T12:18:28.474547Z node 3 :PQ_TX IN ... 6143Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:777:2551] 2026-01-10T12:20:23.778988Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:778:2551] 2026-01-10T12:20:23.790866Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-10T12:20:23.800632Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [69:778:2551] 2026-01-10T12:20:23.822662Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:23.823881Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [69:777:2551] 2026-01-10T12:20:23.856016Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:179:2192] 2026-01-10T12:20:23.900494Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:718:2551] sender: [69:812:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:718:2551] sender: [69:815:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:718:2551] sender: [69:818:2057] recipient: [69:817:2619] Leader for TabletID 72057594037927937 is [69:819:2620] sender: [69:820:2057] recipient: [69:817:2619] 2026-01-10T12:20:23.955173Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:23.955240Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:23.956136Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:23.956190Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:23.956962Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:880:2620] 2026-01-10T12:20:23.959765Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:881:2620] 2026-01-10T12:20:23.970686Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:23.971917Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [69:881:2620] 2026-01-10T12:20:23.993080Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.004714Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [69:880:2620] 2026-01-10T12:20:24.032583Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:179:2192] 2026-01-10T12:20:24.074701Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:819:2620] sender: [69:913:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:819:2620] sender: [69:916:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:819:2620] sender: [69:919:2057] recipient: [69:918:2688] Leader for TabletID 72057594037927937 is [69:920:2689] sender: [69:921:2057] recipient: [69:918:2688] 2026-01-10T12:20:24.122927Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:24.122990Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:24.123756Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:24.123806Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:24.124556Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:983:2689] 2026-01-10T12:20:24.127115Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:984:2689] 2026-01-10T12:20:24.137628Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.144451Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [69:984:2689] 2026-01-10T12:20:24.157832Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.158909Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [69:983:2689] 2026-01-10T12:20:24.220595Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:179:2192] 2026-01-10T12:20:24.243337Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:920:2689] sender: [69:1018:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:920:2689] sender: [69:1021:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:920:2689] sender: [69:1024:2057] recipient: [69:1023:2761] Leader for TabletID 72057594037927937 is [69:1025:2762] sender: [69:1026:2057] recipient: [69:1023:2761] 2026-01-10T12:20:24.303894Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:24.303959Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:24.304780Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:24.304832Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:24.305541Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1090:2762] 2026-01-10T12:20:24.308163Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1091:2762] 2026-01-10T12:20:24.318581Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.319495Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [69:1091:2762] 2026-01-10T12:20:24.339369Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.340499Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [69:1090:2762] 2026-01-10T12:20:24.370942Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:179:2192] 2026-01-10T12:20:24.423313Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:1025:2762] sender: [69:1123:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:1025:2762] sender: [69:1126:2057] recipient: [69:103:2137] Leader for TabletID 72057594037927937 is [69:1025:2762] sender: [69:1129:2057] recipient: [69:1128:2834] Leader for TabletID 72057594037927937 is [69:1130:2835] sender: [69:1131:2057] recipient: [69:1128:2834] 2026-01-10T12:20:24.482342Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:24.482405Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:24.483222Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:24.483272Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:20:24.484098Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1197:2835] 2026-01-10T12:20:24.486685Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1198:2835] 2026-01-10T12:20:24.496875Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.505520Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [69:1198:2835] 2026-01-10T12:20:24.525904Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:20:24.526980Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [69:1197:2835] 2026-01-10T12:20:24.556866Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2026-01-10T12:19:54.427806Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706595880860104:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:54.427873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:54.643966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:54.651074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:54.651165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:54.708821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:54.733426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:54.740127Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706595880860075:2081] 1768047594425298 != 1768047594425301 2026-01-10T12:19:54.803791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:19:54.803829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:19:54.803840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:19:54.804008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:54.918041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.020713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:55.129614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.435038Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:59.429293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706595880860104:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:59.429394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:09.600008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:20:09.600034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:19.525324Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706702872636995:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.525511Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:19.540349Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:19.601861Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:19.603366Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706702872636967:2081] 1768047619524397 != 1768047619524400 2026-01-10T12:20:19.637413Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:19.637484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:19.639905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:19.715219Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:19.715240Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:19.715245Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:19.715338Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:19.798832Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:20.005663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.024433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:20.039629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at 
schemeshard: 72057594046644480 2026-01-10T12:20:20.048412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.585342Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13288, MsgBus: 16951 2026-01-10T12:20:18.006598Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706699709102645:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:18.006677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:18.289114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:18.289256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:18.291635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:18.337553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:18.373716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:18.464031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:18.464051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:18.464062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:18.464211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:18.628195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:18.898980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:18.906432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:18.959047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.026892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:19.103510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.262938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.319045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.269726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706712594006352:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.269861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.270235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706712594006362:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.270291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.539172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.571697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.602997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.634661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.670491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.708701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.797131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.873558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.959494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706712594007233:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.959580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.959673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706712594007238:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.959791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706712594007240:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.959830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.963554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:21.977947Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706712594007242:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:22.082677Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706716888974591:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:23.005269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706699709102645:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:23.005384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2026-01-10T12:19:57.397404Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706607534141636:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:57.397461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.669982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:57.670093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:57.724217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.729950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:57.742965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:57.762707Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706607534141607:2081] 1768047597396644 != 1768047597396647 2026-01-10T12:19:57.935757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:57.950879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:19:57.974058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.040516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.185719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.239634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.760563Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706621932144317:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.760745Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.785951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.860969Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:00.863696Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706621932144273:2081] 1768047600755349 != 1768047600755352 2026-01-10T12:20:00.875560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:00.875662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:00.908705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:01.070829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.118281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:01.138233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.188772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.237033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.299817Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706639378758688:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:04.299875Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:04.314013Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.379919Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:04.381439Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706639378758662:2081] 1768047604299209 != 1768047604299212 2026-01-10T12:20:04.418213Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.418300Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.424292Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:04.532412Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.602591Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:04.621958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.670678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.713458Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.859187Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706651309415049:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:07.859276Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:07.879616Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.960372Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:07.974683Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:07.974770Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:08.005750Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:08.063066Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.190925Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:08.207973Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.287445Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.337097Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.211290Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593706670148200486:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:11.211362Z node 5 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:11.228384Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.303028Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:11.331313Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:11.331402Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:11.346412Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:11.426930Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.551753Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:11.576832Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.633354Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.685553Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.276815Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593706684759015995:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:15.277748Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:15.291359Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:15.409977Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:20:15.424307Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:15.424413Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:15.427625Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:15.537348Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:15.656115Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:15.677234Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.743957Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.811876Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.670501Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593706704732879045:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.670740Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:19.688126Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:19.797362Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:19.799337Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:19.800137Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593706704732879020:2081] 1768047619667776 != 1768047619667779 2026-01-10T12:20:19.836781Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:19.836889Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:19.849332Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:20.018481Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:20.109904Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.133508Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.204267Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.271419Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 7909, MsgBus: 21786 2026-01-10T12:20:17.639710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706694005692236:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:17.639791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:17.924863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:17.942575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:17.942889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:17.995927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:20:18.045204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:18.047710Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706694005692206:2081] 1768047617635054 != 1768047617635057 2026-01-10T12:20:18.132698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:18.132753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:18.132764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:18.132932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:18.203156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:18.599636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:18.608323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:18.650718Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:18.662474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.810747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.975184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.038305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.128104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706711185563267:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool 
info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.128245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.128626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706711185563277:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.128670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.471183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.505208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.540825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.572707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.606780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.654238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.695016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.738703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.830515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706711185564147:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.830626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.830997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706711185564152:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.831048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706711185564153:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.831080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.835222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:21.851329Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706711185564156:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:21.938000Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706711185564207:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:22.640616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706694005692236:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:22.640677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:23.588705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 4331 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 399 affected_shards: 1 } compilation { duration_us: 61785 cpu_time_us: 56566 } process_cpu_time_us: 682 total_duration_us: 68011 total_cpu_time_us: 57647 query_phases { duration_us: 3613 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 120 affected_shards: 1 } compilation { duration_us: 52416 cpu_time_us: 47644 } process_cpu_time_us: 572 total_duration_us: 57617 total_cpu_time_us: 48336 2026-01-10T12:20:23.939993Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=5; 2026-01-10T12:20:23.951770Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:20:23.951989Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:20:23.952259Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706719775499258:2531], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [1:7593706719775499094:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037927, Sink=[1:7593706719775499258:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:23.952861Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706719775499252:2531], SessionActorId: [1:7593706719775499094:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7593706719775499094:2531]. 2026-01-10T12:20:23.953140Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTgwYTBiNjgtMzFkMmI1NmEtZjI5OWE5ZTgtYzYxYzA3NGY=, ActorId: [1:7593706719775499094:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkfp0fjqmt2b3kzx154jp, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7593706719775499253:2531] from: [1:7593706719775499252:2531] trace_id# 2026-01-10T12:20:23.953273Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706719775499253:2531] TxId: 281474976710677. Ctx: { TraceId: 01kekxkfp0fjqmt2b3kzx154jp, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTgwYTBiNjgtMzFkMmI1NmEtZjI5OWE5ZTgtYzYxYzA3NGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:20:23.953604Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTgwYTBiNjgtMzFkMmI1NmEtZjI5OWE5ZTgtYzYxYzA3NGY=, ActorId: [1:7593706719775499094:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkfp0fjqmt2b3kzx154jp, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 15936 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 87 } compilation { duration_us: 63528 cpu_time_us: 59091 } process_cpu_time_us: 742 total_duration_us: 81148 total_cpu_time_us: 59920 query_phases { duration_us: 3796 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 341 affected_shards: 1 } compilation { duration_us: 58905 cpu_time_us: 53862 } process_cpu_time_us: 661 total_duration_us: 64481 total_cpu_time_us: 54864 query_phases { duration_us: 2838 cpu_time_us: 419 affected_shards: 1 } compilation { duration_us: 80275 cpu_time_us: 75580 } process_cpu_time_us: 504 total_duration_us: 86509 total_cpu_time_us: 76503 query_phases { duration_us: 4061 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 390 affected_shards: 1 } compilation { duration_us: 74803 cpu_time_us: 63969 } process_cpu_time_us: 632 total_duration_us: 82476 total_cpu_time_us: 64991 query_phases { duration_us: 4242 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 392 affected_shards: 1 } compilation { duration_us: 93745 cpu_time_us: 89039 } process_cpu_time_us: 617 total_duration_us: 102336 total_cpu_time_us: 90048 query_phases { duration_us: 4530 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 344 affected_shards: 1 } compilation { duration_us: 54766 cpu_time_us: 46991 } process_cpu_time_us: 565 total_duration_us: 62426 total_cpu_time_us: 47900 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 3767, MsgBus: 29323 2026-01-10T12:20:14.563297Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706681975516786:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:14.563437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:14.791031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:14.821622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:14.821751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:14.879423Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded 2026-01-10T12:20:14.900234Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706681975516757:2081] 1768047614561636 != 1768047614561639 2026-01-10T12:20:14.902239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:15.026950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:15.026980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:15.026987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:15.027111Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:15.039650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:15.461660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:15.510079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.573610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:15.639531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.820249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.881930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.942426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706694860420520:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.942570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.943676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706694860420530:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.943755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.364379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.396565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.427299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.466140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.504971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.545906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.607575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.643352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.726634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699155388702:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.726738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.726991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699155388707:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.727017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706699155388708:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.727083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:18.730576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:18.741140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706699155388711:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:18.816646Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706699155388762:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:19.563783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706681975516786:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.563876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:20.561785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... baseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:20:22.920988Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706716335258611:2627], TxId: 281474976710691, task: 4. Ctx: { TraceId : 01kekxkebs5qwjj0ceagzxsda4. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593706716335258605:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:22.921276Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkebs5qwjj0ceagzxsda4, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1144 cpu_time_us: 542 } query_phases { duration_us: 4046 table_access { name: "/Root/TestTable" partitions_count: 1 } table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1290 affected_shards: 2 } query_phases { duration_us: 2258 cpu_time_us: 1236 } compilation { duration_us: 385939 cpu_time_us: 377577 } process_cpu_time_us: 2225 total_duration_us: 399454 total_cpu_time_us: 382870 2026-01-10T12:20:23.213161Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706720630225943:2640], TxId: 281474976710694, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkerh7nwra02nfpqryp78. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:20:23.225962Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706720630225944:2641], TxId: 281474976710694, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxkerh7nwra02nfpqryp78. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7593706720630225940:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:23.226366Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkerh7nwra02nfpqryp78, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1030 cpu_time_us: 437 } query_phases { duration_us: 18455 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 30849 affected_shards: 1 } query_phases { duration_us: 14767 cpu_time_us: 12881 } compilation { duration_us: 256548 cpu_time_us: 249570 } process_cpu_time_us: 2051 total_duration_us: 296800 total_cpu_time_us: 295788 2026-01-10T12:20:23.488347Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706720630225977:2651], TxId: 281474976710697, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkf239fn537encyer34z6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:20:23.488559Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706720630225978:2652], TxId: 281474976710697, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxkf239fn537encyer34z6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593706720630225974:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:23.488886Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkf239fn537encyer34z6, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 730 cpu_time_us: 317 } query_phases { duration_us: 3546 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 563 affected_shards: 1 } query_phases { duration_us: 1482 cpu_time_us: 192 } compilation { duration_us: 243514 cpu_time_us: 235439 } process_cpu_time_us: 1729 total_duration_us: 252650 total_cpu_time_us: 238240 2026-01-10T12:20:23.770107Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706720630226025:2664], TxId: 281474976710700, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkfa9bxwf4h7qvwbva96g. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. query_phases { duration_us: 632 cpu_time_us: 262 } query_phases { duration_us: 3347 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 552 affected_shards: 1 } query_phases { duration_us: 1423 cpu_time_us: 213 } compilation { duration_us: 262322 cpu_time_us: 253289 } process_cpu_time_us: 1508 total_duration_us: 273016 total_cpu_time_us: 255824 2026-01-10T12:20:23.770325Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706720630226026:2665], TxId: 281474976710700, task: 2. Ctx: { TraceId : 01kekxkfa9bxwf4h7qvwbva96g. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593706720630226022:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:23.770592Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkfa9bxwf4h7qvwbva96g, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# 2026-01-10T12:20:24.038259Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706724925193354:2676], TxId: 281474976710703, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkfk08k45b95gw6t6mnve. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:20:24.038441Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706724925193355:2677], TxId: 281474976710703, task: 2. Ctx: { TraceId : 01kekxkfk08k45b95gw6t6mnve. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593706724925193351:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:24.038770Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkfk08k45b95gw6t6mnve, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 667 cpu_time_us: 274 } query_phases { duration_us: 3384 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 522 affected_shards: 1 } query_phases { duration_us: 1499 cpu_time_us: 216 } compilation { duration_us: 251879 cpu_time_us: 243694 } process_cpu_time_us: 1645 total_duration_us: 261535 total_cpu_time_us: 246351 2026-01-10T12:20:24.359279Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706724925193385:2687], TxId: 281474976710706, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkfvhdqfmzttk5smhdshj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-10T12:20:24.359749Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593706724925193386:2688], TxId: 281474976710706, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxkfvhdqfmzttk5smhdshj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7593706724925193382:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:20:24.360064Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTM3NTQ0MTctZDdmNDIxMGItYTFhMmE2YmEtNjY1MGJiODI=, ActorId: [1:7593706707745323617:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxkfvhdqfmzttk5smhdshj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 757 cpu_time_us: 338 } query_phases { duration_us: 3337 table_access { name: "/Root/TestTable2" partitions_count: 1 } cpu_time_us: 543 affected_shards: 1 } query_phases { duration_us: 2252 cpu_time_us: 214 } compilation { duration_us: 296316 cpu_time_us: 287728 } process_cpu_time_us: 1714 total_duration_us: 310329 total_cpu_time_us: 290537 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> KqpCost::CTASWithRetry-isOlap >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 17901, MsgBus: 62413 2026-01-10T12:20:18.745338Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706700134289574:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:18.746124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:18.995870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:19.025499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:19.025608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:19.053123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:19.142643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:19.196088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:19.196113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:19.196120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:19.196226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:19.296589Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:19.581855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:19.642342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.747472Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:19.791547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.996426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.076834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.070493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706717314160519:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.070597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.070929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706717314160529:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.070994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.358981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.398029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.431994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.467368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.503231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.541439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.581034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.630962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.709689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706717314161401:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.709765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.709932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706717314161406:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.709987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706717314161408:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.710046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.714049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:22.726686Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706717314161410:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:22.824812Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706717314161463:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:23.740785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706700134289574:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:23.740868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:24.516247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.795881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 6619 table_access { name: "/Root/.tmp/sessions/51b7b42f-452c-cd01-9b65-dcbe02a1ec58/Root/TestTable2_c158f246-4a26-1bdd-196f-b6bd7a4be26b" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 1566 affected_shards: 1 } compilation { duration_us: 11520 cpu_time_us: 7662 } process_cpu_time_us: 1771 total_duration_us: 294078 total_cpu_time_us: 10999 2026-01-10T12:20:24.956101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:20:24.961216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17008, MsgBus: 25568 2026-01-10T12:20:17.940006Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706696987161310:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:17.940528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:18.278435Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:18.281767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:18.281854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:18.284374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:18.307513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:18.324891Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706696987161191:2081] 1768047617927066 != 1768047617927069 2026-01-10T12:20:18.408288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:18.408316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:18.408326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:18.408426Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:18.579188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:18.843584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:18.904690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.941069Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:19.032723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.178824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.247144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.222139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706714167032256:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.222295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.222761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706714167032266:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.222805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.581437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.611623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.639189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.670121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.704722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.782144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.837128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.891253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.975860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706714167033146:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.975939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.976166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706714167033151:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.976198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706714167033152:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.976259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.980260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:21.995244Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706714167033155:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:22.071945Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706718462000502:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:22.934382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706696987161310:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:22.934465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:23.822564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099 ... 57] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:142:2057] recipient: [56:141:2158] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:144:2057] recipient: [56:141:2158] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! new actor is[56:143:2159] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:259:2057] recipient: [56:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] !Reboot 72057594037927937 (actor [57:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:140:2057] recipient: [57:39:2086] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:143:2057] recipient: [57:142:2158] Leader for TabletID 72057594037927937 is [57:144:2159] sender: [57:145:2057] recipient: [57:142:2158] !Reboot 72057594037927937 (actor [57:58:2099]) rebooted! !Reboot 72057594037927937 (actor [57:58:2099]) tablet resolver refreshed! new actor is[57:144:2159] Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:59:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:76:2057] recipient: [58:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:54:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:54:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:81:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:84:2057] recipient: [63:83:2114] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:86:2057] recipient: [63:83:2114] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! 
new actor is[63:85:2115] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:201:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:81:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:84:2057] recipient: [64:83:2114] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:86:2057] recipient: [64:83:2114] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:85:2115] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:201:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:82:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:85:2057] recipient: [65:84:2114] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:87:2057] recipient: [65:84:2114] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:86:2115] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:202:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:85:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:88:2057] recipient: [66:87:2117] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:90:2057] recipient: [66:87:2117] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! 
new actor is[66:89:2118] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:205:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:205:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:86:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:89:2057] recipient: [68:88:2117] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:91:2057] recipient: [68:88:2117] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:90:2118] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:206:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpCost::CTAS+isOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2026-01-10T12:19:44.286449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706555020557529:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:44.288450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:44.575784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.588619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:44.588826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:44.598947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:44.600926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706555020557486:2081] 1768047584281672 != 1768047584281675 2026-01-10T12:19:44.616532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:44.788631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:44.811286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:44.887949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:44.990454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:45.038379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:47.656856Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706564858315140:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:47.656916Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:47.674136Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.749409Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:47.750819Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706564858315115:2081] 1768047587656321 != 1768047587656324 2026-01-10T12:19:47.789710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:47.789795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:47.796947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:47.876952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:47.982973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:47.988339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:48.007355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.064410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:48.110237Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:50.775642Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706578888961301:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:50.775825Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:50.791069Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:50.868210Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:50.869564Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706578888961198:2081] 1768047590772999 != 1768047590773002 2026-01-10T12:19:50.910721Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:50.910825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:50.918204Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:51.036459Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:51.112744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:51.121690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:51.136590Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.211772Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:51.264431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.120851Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706595713150094:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:54.120924Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:54.136274Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:54.212123Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:54.252148Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:54.252243Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:54.257991Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:54.446737Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:54.459708Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:54.482292Z node 4 :FLAT_TX_SCHEMESHAR ... 
1374: Notification cookie mismatch for subscription [7:7593706646671998267:2081] 1768047606959779 != 1768047606959782 2026-01-10T12:20:07.085063Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:07.085135Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:07.113019Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:07.275681Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.366535Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:07.389802Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.454915Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:07.511723Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.242717Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706667613700163:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:11.242809Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:11.264194Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.353860Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:11.353953Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:11.355726Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:11.357550Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [8:7593706667613700137:2081] 1768047611242244 != 1768047611242247 2026-01-10T12:20:11.366133Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:11.434490Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:11.641969Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:11.648359Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:11.667887Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.747406Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:11.796027Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:15.923356Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706687792804017:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:15.923433Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:15.957613Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:16.044870Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:16.044949Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:16.055126Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:16.085110Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:16.215803Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:16.394894Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:16.421661Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:16.490110Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:16.551301Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.086957Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706712349761697:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.087064Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:21.113707Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:21.215194Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:21.216651Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706712349761672:2081] 1768047621086370 != 1768047621086373 2026-01-10T12:20:21.228357Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:21.228446Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:21.263041Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:21.325295Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:21.555995Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:21.579601Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.657448Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.724371Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 9331, MsgBus: 20685 2026-01-10T12:20:19.921784Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706704520290860:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.922098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:20.228068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:20.228222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:20.267936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:20.277506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:20.296236Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706704520290640:2081] 1768047619899547 != 1768047619899550 2026-01-10T12:20:20.302134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:20.416003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:20.416051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:20.416060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:20.416204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:20.447291Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:20.905608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.914206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:20.919504Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:20.987336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.145568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.322760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.386088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.340209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706721700161694:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.340383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.340800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706721700161704:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.340899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.601389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.634937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.666080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.697529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.732210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.762763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.793516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.836281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.910335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706721700162577:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.910418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.910650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706721700162583:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.910696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706721700162582:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.910809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.914963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:23.923989Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706721700162586:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:24.015110Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706725995129935:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:24.920141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706704520290860:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.920237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 24391, MsgBus: 27300 2026-01-10T12:20:19.617375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706702716017797:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.617521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:19.855409Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:19.859150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:19.859271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:19.926934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:19.991320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:19.996828Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706702716017690:2081] 
1768047619600754 != 1768047619600757 2026-01-10T12:20:20.104096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:20.117477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:20.117495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:20.117500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:20.117601Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:20.611332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.625576Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:20.664323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.807580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.960016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.049931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.811588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706715600921441:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.811695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.812015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706715600921451:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:22.812066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.079345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.106580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.134072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.163843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.192803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.225476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.255609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.298345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.367968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706719895889617:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.368043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.368066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706719895889622:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.368308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706719895889624:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.368367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.371931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:23.383217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706719895889625:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:23.454793Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706719895889677:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:24.617539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706702716017797:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.617593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:25.312679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCost::OlapWriteRow [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3994, MsgBus: 21861 2026-01-10T12:20:20.661120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706708115969680:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:20.661486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:20.963847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:20:20.968371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:20.968526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:21.071350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:21.132606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706708115969575:2081] 1768047620648434 != 1768047620648437 2026-01-10T12:20:21.157323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:21.161442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:21.245710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:21.245736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:21.245744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:21.245852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:21.669135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:21.680705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:21.688935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:21.749773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.902815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.053543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.122931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.088405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706725295840636:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.088537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.089100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706725295840646:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.089157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.367145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.401473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.431233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.459230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.486542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.520047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.554203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.613572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.692669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706725295841516:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.692736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.692991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706725295841521:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.693024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706725295841522:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.693130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.696532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:24.708379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706725295841525:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:24.796600Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706725295841576:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:25.655621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706708115969680:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:25.655689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:26.367650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 19650, MsgBus: 16614 2026-01-10T12:20:20.864326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706706155452186:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:20.864770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:21.136184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:21.183496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:21.183578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:21.251796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:21.259227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:21.309506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:21.356728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:21.356758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:21.356766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:21.356857Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:21.776560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:21.854655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.860820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:22.019831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.168161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.244520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.116869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706723335323112:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.117002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.117394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706723335323122:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.117468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.437086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.466829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.495661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.526841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.559823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.598121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.636849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.685375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.771884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706723335323991:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.771972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.772272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706723335323997:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.772332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.772378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706723335323996:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:24.775974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:24.788437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706723335324000:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:24.873969Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706723335324051:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:25.848793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706706155452186:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:25.848881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:26.555561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.589944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.645572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow [GOOD] Test command err: Trying to start YDB, gRPC: 6357, MsgBus: 17551 2026-01-10T12:20:19.921310Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706703483550741:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:19.921682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:20.144503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:20.209822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:20.209944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:20.219819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:20:20.220855Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706703483550697:2081] 1768047619919478 != 1768047619919481 2026-01-10T12:20:20.239919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:20.345838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:20.345866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:20.345876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:20.346005Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:20.395680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:20.771746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.803449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:20.859511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.937595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:21.057722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.240199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.319134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.150350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706720663421757:2399], DatabaseId: /Root, 
PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.150498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.150802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706720663421767:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.150853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.516867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.554218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.583188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.611799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.640963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.675613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.739060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.775451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.844997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706720663422636:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.845037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706720663422641:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.845068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.845235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706720663422644:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.845273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:23.848178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:23.859232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706720663422643:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:23.939854Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706720663422696:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:24.921631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706703483550741:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.921695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:25.483959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 0T12:20:25.945711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.945815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.945835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.954421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.954488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.954504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.957923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.957971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.957984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.962968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.963038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:25.963054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 5822 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 320 affected_shards: 1 } query_phases { duration_us: 4624 affected_shards: 1 } compilation { duration_us: 47722 cpu_time_us: 44175 } process_cpu_time_us: 657 total_duration_us: 59718 total_cpu_time_us: 45152 query_phases { duration_us: 5745 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 72 affected_shards: 1 } query_phases { duration_us: 5304 affected_shards: 1 } compilation { duration_us: 55484 cpu_time_us: 50685 } process_cpu_time_us: 797 total_duration_us: 68534 total_cpu_time_us: 51554 query_phases { duration_us: 18953 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 374 affected_shards: 1 } query_phases { duration_us: 9451 affected_shards: 1 } compilation { duration_us: 65087 cpu_time_us: 60180 } process_cpu_time_us: 875 total_duration_us: 95658 total_cpu_time_us: 61429 query_phases { duration_us: 40206 table_access { name: "/Root/TestTable" updates { rows: 2 bytes: 744 } partitions_count: 2 } cpu_time_us: 410 affected_shards: 2 } query_phases { duration_us: 9376 affected_shards: 2 } compilation { duration_us: 79767 cpu_time_us: 73550 } process_cpu_time_us: 837 total_duration_us: 131460 total_cpu_time_us: 74797 query_phases { duration_us: 18380 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 337 affected_shards: 1 } query_phases { duration_us: 5400 affected_shards: 1 } compilation { duration_us: 77630 cpu_time_us: 73213 } process_cpu_time_us: 734 total_duration_us: 103212 total_cpu_time_us: 74284 query_phases { duration_us: 9814 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 280 affected_shards: 1 } query_phases { duration_us: 10056 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 317 affected_shards: 2 } query_phases { duration_us: 10004 affected_shards: 2 } compilation { duration_us: 78700 cpu_time_us: 74421 } process_cpu_time_us: 1246 total_duration_us: 112450 total_cpu_time_us: 76264 2026-01-10T12:20:26.851653Z node 1 :TX_COLUMNSHARD_RESTORE WARN: log.cpp:841: tablet_id=72075186224037935;tablet_actor_id=[1:7593706729253357728:2542];this=137147255707200;activity=1;task_id=beef5156-ee1e11f0-89cfb5ba-a98de42b::4;fline=restore.cpp:28;event=merge_data_problems;write_id=4;tablet_id=72075186224037935;message=Conflict with existing key. 
{"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}; 2026-01-10T12:20:26.851899Z node 1 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7593706729253357728:2542];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037935;event=TEvWriteBlobsResult;fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]};tx_id=281474976710689; 2026-01-10T12:20:26.852518Z node 1 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:152: Scan [1:7593706733548325843:2820] got AbortExecution txId: 281474976710689 scanId: 1 gen: 1 tablet: 72075186224037935 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]} } 2026-01-10T12:20:26.881186Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706733548325840:2818], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [0:0:0]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037935, Sink=[1:7593706733548325840:2818].{
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } 2026-01-10T12:20:26.881341Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [1:7593706733548325838:2818], TxId: 281474976710689, task: 1. Ctx: { TraceId : 01kekxkjg52p64qbc68hdenk3j. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODIyMTA0OWQtM2ExMmVmNGEtNDI1YWE3YWYtZmIwY2NhYjc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } } 2026-01-10T12:20:26.881438Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706733548325838:2818], TxId: 281474976710689, task: 1. Ctx: { TraceId : 01kekxkjg52p64qbc68hdenk3j. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODIyMTA0OWQtM2ExMmVmNGEtNDI1YWE3YWYtZmIwY2NhYjc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } }. 2026-01-10T12:20:26.882083Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODIyMTA0OWQtM2ExMmVmNGEtNDI1YWE3YWYtZmIwY2NhYjc=, ActorId: [1:7593706729253357584:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkjg52p64qbc68hdenk3j, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key. {\"sorting_columns\":[{\"name\":\"Group\",\"value\":\"1\"},{\"name\":\"Name\",\"value\":\"Anna\"}],\"fields\":[\"Group: Uint32\",\"Name: String\"]}" issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 57486 cpu_time_us: 110 } compilation { duration_us: 64767 cpu_time_us: 59538 } process_cpu_time_us: 983 total_duration_us: 124339 total_cpu_time_us: 60631 query_phases { duration_us: 11594 cpu_time_us: 445 affected_shards: 1 } query_phases { duration_us: 5653 affected_shards: 1 } compilation { duration_us: 77742 cpu_time_us: 72357 } process_cpu_time_us: 735 total_duration_us: 97083 total_cpu_time_us: 73537 query_phases { duration_us: 6915 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 365 affected_shards: 1 } query_phases { duration_us: 6010 affected_shards: 1 } compilation { duration_us: 51588 cpu_time_us: 46870 } process_cpu_time_us: 740 total_duration_us: 66355 total_cpu_time_us: 47975 query_phases { duration_us: 7068 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 394 affected_shards: 1 } query_phases { duration_us: 5344 affected_shards: 1 } compilation { duration_us: 57466 cpu_time_us: 52167 } process_cpu_time_us: 942 total_duration_us: 72374 total_cpu_time_us: 53503 query_phases { duration_us: 7938 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 347 affected_shards: 1 } query_phases { duration_us: 12713 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 464 affected_shards: 2 } query_phases { duration_us: 9978 affected_shards: 2 } compilation { duration_us: 94489 cpu_time_us: 88335 } process_cpu_time_us: 1251 total_duration_us: 132031 total_cpu_time_us: 90397 query_phases { duration_us: 839 cpu_time_us: 235 } query_phases { duration_us: 187347 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 40 } deletes { rows: 2 } partitions_count: 2 } cpu_time_us: 18430 affected_shards: 10 } query_phases { duration_us: 14306 affected_shards: 10 } compilation { duration_us: 422212 cpu_time_us: 412531 } process_cpu_time_us: 2215 total_duration_us: 629661 total_cpu_time_us: 433411 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> 
TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 27210, MsgBus: 29881 2026-01-10T12:20:21.721478Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706712774802155:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:21.721570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:21.965834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:21.971514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:21.971601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:22.035425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:22.062471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:22.063773Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706712774802130:2081] 1768047621720298 != 1768047621720301 2026-01-10T12:20:22.162148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:22.162177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:22.162185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:22.162289Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:22.228181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:22.642240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:22.649529Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:22.714460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.736711Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:22.873826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.019878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.092622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.032069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706729954673187:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.032209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.032548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706729954673197:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.032594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.390407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.426852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.461138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.495747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.530525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.570961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.615342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.687977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.768300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706729954674072:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.768390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.768548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706729954674077:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.768577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706729954674078:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.768676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.771974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:25.784396Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706729954674081:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:25.858165Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706729954674132:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:26.721581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706712774802155:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:26.721649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:27.354448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3516 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 157 } compilation { duration_us: 101096 cpu_time_us: 93702 } process_cpu_time_us: 959 total_duration_us: 108868 total_cpu_time_us: 94818 2026-01-10T12:20:28.347597Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=6; 2026-01-10T12:20:28.347866Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706742839576690:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7593706738544609020:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7593706742839576690:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:28.347917Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706742839576684:2531], SessionActorId: [1:7593706738544609020:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7593706738544609020:2531]. 2026-01-10T12:20:28.348128Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkkz9bhbcmrjp3nxdp6z6, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7593706742839576685:2531] from: [1:7593706742839576684:2531] trace_id# 2026-01-10T12:20:28.348235Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706742839576685:2531] TxId: 281474976710687. Ctx: { TraceId: 01kekxkkz9bhbcmrjp3nxdp6z6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:20:28.348448Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkkz9bhbcmrjp3nxdp6z6, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3416 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 410 } compilation { duration_us: 75503 cpu_time_us: 69465 } process_cpu_time_us: 697 total_duration_us: 82714 total_cpu_time_us: 70572 2026-01-10T12:20:28.441815Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=7; 2026-01-10T12:20:28.442141Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706742839576707:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7593706738544609020:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7593706742839576707:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:28.442213Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706742839576701:2531], SessionActorId: [1:7593706738544609020:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7593706738544609020:2531]. 2026-01-10T12:20:28.442421Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm239z25zbqy98g779m4, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7593706742839576702:2531] from: [1:7593706742839576701:2531] trace_id# 2026-01-10T12:20:28.442506Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706742839576702:2531] TxId: 281474976710689. Ctx: { TraceId: 01kekxkm239z25zbqy98g779m4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:20:28.442747Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm239z25zbqy98g779m4, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 4080 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 523 } compilation { duration_us: 79676 cpu_time_us: 73349 } process_cpu_time_us: 779 total_duration_us: 87260 total_cpu_time_us: 74651 2026-01-10T12:20:28.539836Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=8; 2026-01-10T12:20:28.540236Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706742839576724:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7593706738544609020:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7593706742839576724:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:28.540308Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706742839576718:2531], SessionActorId: [1:7593706738544609020:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7593706738544609020:2531]. 2026-01-10T12:20:28.540495Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm51f91r6epdrwpn7f4m, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7593706742839576719:2531] from: [1:7593706742839576718:2531] trace_id# 2026-01-10T12:20:28.540587Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706742839576719:2531] TxId: 281474976710691. Ctx: { TraceId: 01kekxkm51f91r6epdrwpn7f4m, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:20:28.540826Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm51f91r6epdrwpn7f4m, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3931 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 2 bytes: 40 } partitions_count: 1 } cpu_time_us: 503 } compilation { duration_us: 82219 cpu_time_us: 75299 } process_cpu_time_us: 804 total_duration_us: 90396 total_cpu_time_us: 76606 2026-01-10T12:20:28.639766Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=9; 2026-01-10T12:20:28.640125Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7593706742839576741:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7593706738544609020:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7593706742839576741:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:28.640204Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706742839576735:2531], SessionActorId: [1:7593706738544609020:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7593706738544609020:2531]. 2026-01-10T12:20:28.640440Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm888143wxzjfhbhwywa, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7593706742839576736:2531] from: [1:7593706742839576735:2531] trace_id# 2026-01-10T12:20:28.640541Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706742839576736:2531] TxId: 281474976710693. Ctx: { TraceId: 01kekxkm888143wxzjfhbhwywa, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# query_phases { duration_us: 4365 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 3 bytes: 60 } partitions_count: 1 } cpu_time_us: 549 } compilation { duration_us: 81202 cpu_time_us: 74537 } process_cpu_time_us: 977 total_duration_us: 87839 total_cpu_time_us: 76063 2026-01-10T12:20:28.640912Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTc2YWVmYWEtZWM2NjMzOC1mMWEyODJiNy05NGUzN2M3YQ==, ActorId: [1:7593706738544609020:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxkm888143wxzjfhbhwywa, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:19:08.350560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:19:08.350659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:08.350700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:19:08.350747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:19:08.350794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2026-01-10T12:19:08.350835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:19:08.350909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:08.351023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:19:08.351834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:19:08.352149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:19:08.439284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:19:08.439347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:08.453879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:19:08.453980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:19:08.454205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:19:08.462020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:19:08.462305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:19:08.463061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:08.463351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:19:08.465593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:08.465762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:19:08.467008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:19:08.467071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:08.467161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:19:08.467205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:19:08.467246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:19:08.467492Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:19:08.576675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:08.577958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Reads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5181 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 134 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-10T12:20:29.871924Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:29.872000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.5181 2026-01-10T12:20:29.872183Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:29.872266Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:20:29.905330Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:29.905444Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:29.905511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-10T12:20:29.905638Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-10T12:20:29.905702Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-10T12:20:29.905971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-10T12:20:29.906097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-10T12:20:29.906159Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-10T12:20:29.906334Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-10T12:20:29.906591Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2026-01-10T12:20:29.906671Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:227: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-10T12:20:29.906770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-10T12:20:29.906912Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:20:29.907392Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:127:2152], Recipient [3:722:2708]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-10T12:20:29.907690Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:127:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-10T12:20:29.909485Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:20:29.909554Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2026-01-10T12:20:29.918838Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:712:2700], Recipient [3:722:2708]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:20:29.921529Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.244000Z 2026-01-10T12:20:29.921625Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2026-01-10T12:20:29.921686Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:127:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:20:29.922100Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:722:2708], Recipient [3:127:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-10T12:20:29.922191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-10T12:20:29.922323Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: 
[BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-10T12:20:29.922448Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 29.997000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2026-01-10T12:20:29.923094Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:127:2152], Recipient [3:722:2708]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-10T12:20:29.923245Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:127:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-10T12:20:29.925042Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.244000Z 2026-01-10T12:20:29.925080Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2026-01-10T12:20:29.930172Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:712:2700], Recipient [3:722:2708]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:20:29.932053Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.244000Z 2026-01-10T12:20:29.933372Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:712:2700], Recipient [3:722:2708]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:20:29.937575Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.248000Z 2026-01-10T12:20:29.937654Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2026-01-10T12:20:29.937719Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:127:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:20:29.938034Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:722:2708], Recipient [3:127:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-10T12:20:29.938075Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-10T12:20:29.938168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: 
Operation queue set wakeup after delta# 0 seconds 2026-01-10T12:20:29.938239Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.993000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-10T12:20:29.940959Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:712:2700], Recipient [3:722:2708]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:20:29.956626Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:29.956706Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:29.956741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:20:29.982823Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.248000Z ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:19:09.111270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:19:09.111353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:09.111391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:19:09.111443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:19:09.111500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:19:09.111550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:19:09.111608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:09.111674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:19:09.112521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:19:09.112771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:19:09.176511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:19:09.176587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:09.189684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:19:09.189786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:19:09.190004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:19:09.198388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:19:09.198636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:19:09.199329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:09.199554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:19:09.201734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:09.201905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:19:09.203388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:19:09.203458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:09.203544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:19:09.203591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:19:09.203632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:19:09.203909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:19:09.297382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.298983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:09.299455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
onditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-10T12:20:30.091955Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-10T12:20:30.092108Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:761:2739]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-10T12:20:30.092142Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2026-01-10T12:20:30.183586Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1305:3190]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:30.183676Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:30.183798Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 300 last cleanup 0 2026-01-10T12:20:30.183894Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:20:30.183937Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2026-01-10T12:20:30.183972Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2026-01-10T12:20:30.184011Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2026-01-10T12:20:30.184211Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1305:3190]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:30.184374Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 32 2026-01-10T12:20:30.184735Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1305:3190], Recipient [3:1714:3571]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 33 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 219 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-10T12:20:30.184790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:30.184844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0033 2026-01-10T12:20:30.184954Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:30.185021Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:20:30.195496Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1309:3193]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:30.195582Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:30.195670Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 300 last cleanup 0 2026-01-10T12:20:30.195762Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:20:30.195816Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2026-01-10T12:20:30.195852Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2026-01-10T12:20:30.195897Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2026-01-10T12:20:30.196023Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1309:3193]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:30.196182Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 32 2026-01-10T12:20:30.196559Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1309:3193], Recipient [3:1714:3571]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 219 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-10T12:20:30.196615Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:30.196679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 
followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0028 2026-01-10T12:20:30.196808Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:30.218587Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:1714:3571]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:30.218667Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:30.218742Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:1714:3571], Recipient [3:1714:3571]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:30.218777Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:30.229458Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:1714:3571]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2026-01-10T12:20:30.229551Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5308: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2026-01-10T12:20:30.229590Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-10T12:20:30.229684Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-10T12:20:30.229778Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-10T12:20:30.229949Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:1714:3571]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-10T12:20:30.230005Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2026-01-10T12:20:30.230387Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269746180, Sender [3:2802:4620], Recipient [3:1714:3571]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-10T12:20:30.230441Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5455: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-10T12:20:30.262474Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2805:4623], Recipient [3:1305:3190]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:30.262577Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:30.262640Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409552, clientId# [3:2804:4622], serverId# [3:2805:4623], sessionId# [0:0:0] 2026-01-10T12:20:30.262915Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2803:4621], Recipient [3:1305:3190]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-01-10T12:20:30.263753Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2808:4626], Recipient [3:1309:3193]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:30.263798Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:20:30.263850Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409553, clientId# [3:2807:4625], serverId# [3:2808:4626], sessionId# [0:0:0] 2026-01-10T12:20:30.263978Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2806:4624], Recipient [3:1309:3193]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCost::OlapRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 22818, MsgBus: 4058 2026-01-10T12:20:24.192511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706725190899066:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.192680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:24.406353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:24.406445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:24.461184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:24.467734Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:24.468624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:20:24.470477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706725190898962:2081] 1768047624186768 != 1768047624186771 2026-01-10T12:20:24.574799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:24.574823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:24.574834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:24.574916Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:24.636637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:25.011933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:25.019982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:25.068714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.187123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.199091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:25.363091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.427528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.515735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706738075802720:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.515875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.516206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706738075802730:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.516279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.852176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.880529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.905726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.935578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.966528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.006530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.041635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.084643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.173939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706742370770904:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.174030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.174274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706742370770909:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.174325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706742370770910:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.174621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.178757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:28.195811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706742370770913:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:28.302490Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706742370770966:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:29.190876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706725190899066:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:29.190965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:30.237180Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047630265, txId: 281474976715673] shutting down |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorks |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery >> KqpCompileFallback::NoFallbackWhenSqlVersion1 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 9146, MsgBus: 63067 2026-01-10T12:20:25.024771Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706729947016774:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:25.024844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:25.281849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:25.380908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706729947016745:2081] 1768047625019272 != 1768047625019275 2026-01-10T12:20:25.383730Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:25.383842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:25.387447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:25.403649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:25.491000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:25.500410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:25.500434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:25.500441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:25.500523Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:25.962849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:26.023971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.036459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:26.160510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.313120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.384589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.346438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706742831920515:2399], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.346617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.347164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706742831920525:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.347210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.713661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.746447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.775829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.810611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.844661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.885953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.966876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.033659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.110344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706747126888695:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.110445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.110690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706747126888700:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.110693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706747126888701:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.110733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.114638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:29.126030Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706747126888704:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:29.191144Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706747126888755:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:30.028216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706729947016774:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:30.028339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:31.081541Z node 1 :KQP_GATEWAY DEBUG: kqp_metadata_loader.cpp:922: Load table metadata from cache by path, request Path: /Root/Test 2026-01-10T12:20:31.242488Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snapshot_manager.cpp:37: Start KqpSnapshotManager at [1:7593706755716823629:2521] 2026-01-10T12:20:31.242533Z node 1 :KQP_RESOURCE_MANAGER DE ... 716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:209;event=send_data_to_compute;space=8388608;queue=0;compute_actor_id=[1:7593706755716823644:2530];rows=0; 2026-01-10T12:20:31.262257Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:364;event=register_scanner_actor;actor_id=[1:7593706755716823649:2052];state=Starting;tablet_id=72075186224037914;generation=1;shard_actor=[0:0:0]; 2026-01-10T12:20:31.262300Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:125;event=start_scanner;actor_id=[1:7593706755716823649:2052]; 2026-01-10T12:20:31.262342Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:48;event=scan_ack;actor_id=[1:7593706755716823649:2052]; 2026-01-10T12:20:31.262907Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_fetcher_actor.cpp:134;Recv TEvScanData from ShardID==[1:7593706755716823649:2052];ScanId=1;Finished=1;Lock=;BrokenLocks=; 2026-01-10T12:20:31.263001Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_fetcher_actor.cpp:531;action=got EvScanData;rows=3;finished=1;generation=1;exceeded=0;scan=1;packs_to_send=0;from=[1:7593706755716823649:2052];shards remain=0;in flight scans=1;cursor=;in flight shards=1;delayed_for_seconds_by_ratelimiter=0;tablet_id=72075186224037914;locks=0;broken locks=0; 2026-01-10T12:20:31.263038Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.cpp:40;event=receive_data;actor_id=[1:7593706755716823649:2052];count_chunks=1; 2026-01-10T12:20:31.263064Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:292;event=on_receive;compute_shard_id=NO_VALUE_OPTIONAL; 2026-01-10T12:20:31.263088Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: 
self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:238;event=add_data_to_compute;rows=3; 2026-01-10T12:20:31.263117Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:209;event=send_data_to_compute;space=8388608;queue=1;compute_actor_id=[1:7593706755716823644:2530];rows=3; 2026-01-10T12:20:31.263155Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:193;event=stop_scanner; 2026-01-10T12:20:31.263185Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:45;event=scan_ack_on_finished;actor_id=[1:7593706755716823649:2052]; 2026-01-10T12:20:31.263204Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:551: SelfId: [1:7593706755716823646:2532]. EVLOGKQP:0/0/3/3 2026-01-10T12:20:31.263213Z node 1 :KQP_COMPUTE DEBUG: log.h:466: kqp_scan_compute_actor.cpp:184 :TEvSendData: [1:7593706755716823646:2532]/[1:7593706755716823644:2530] 2026-01-10T12:20:31.263234Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:392;event=scanner_finished;tablet_id=72075186224037914;stop_shard=1; 2026-01-10T12:20:31.263262Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:100;event=stop_scanner;actor_id=[1:7593706755716823649:2052];message=;final_flag=1; 2026-01-10T12:20:31.263382Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:594: SelfId: [1:7593706755716823646:2532]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2026-01-10T12:20:31.263436Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593706755716823644:2530], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxkpncen7r8c3ff0bt30mh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:20:31.263446Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7593706755716823646:2532];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:441;event=wait_all_scanner_finished;scans=0; 2026-01-10T12:20:31.263519Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:685: SelfId: [1:7593706755716823646:2532]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2026-01-10T12:20:31.263698Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:20:31.263832Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7593706755716823644:2530] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 3413 Tasks { TaskId: 1 CpuTimeUs: 986 FinishTimeMs: 1768047631263 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 183 BuildCpuTimeUs: 803 WaitOutputTimeUs: 347 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 1 StartTimeMs: 1768047631263 CreateTimeMs: 1768047631249 UpdateTimeMs: 1768047631263 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:20:31.263899Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7593706755716823644:2530] 2026-01-10T12:20:31.263942Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7593706755716823645:2531], trace_id# 2026-01-10T12:20:31.263989Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:20:31.264189Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593706755716823645:2531], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxkpncen7r8c3ff0bt30mh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:20:31.264239Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1768047631 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-10T12:20:31.264386Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593706755716823645:2531], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxkpncen7r8c3ff0bt30mh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:20:31.264533Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:20:31.264678Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:20:31.264787Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2026-01-10T12:20:33.263671Z, after 1.999029s 2026-01-10T12:20:31.264934Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7593706755716823609:2521] SeqNo# 1 Rows# 1 trace_id# 2026-01-10T12:20:31.265104Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7593706755716823645:2531] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 9759 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 786 FinishTimeMs: 1768047631264 InputRows: 1 InputBytes: 20 ComputeCpuTimeUs: 186 BuildCpuTimeUs: 600 HostName: "ghrun-tjszgfszoq" NodeId: 1 CreateTimeMs: 1768047631250 UpdateTimeMs: 1768047631264 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:20:31.265143Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7593706755716823645:2531] 2026-01-10T12:20:31.265236Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:20:31.265283Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [1:7593706755716823640:2521] TxId: 281474976710674. Ctx: { TraceId: 01kekxkpncen7r8c3ff0bt30mh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhYzJkMmYtNjU2MTFhNTQtZTMyYmU4OGItOTM2YTdhYTk=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.013172s ReadRows: 3 ReadBytes: 96 RequestUnits# 8 ForceFlag# true trace_id# 2026-01-10T12:20:31.267765Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047631294, txId: 281474976710673] shutting down |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 6224, MsgBus: 2751 2026-01-10T12:20:24.348447Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706723504620164:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.348567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:24.594853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:24.594986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:24.601208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:24.648918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:24.701017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:24.705884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706723504620135:2081] 1768047624346755 != 1768047624346758 2026-01-10T12:20:24.766052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:24.766078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:24.766098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:24.766189Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:24.912391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-10T12:20:25.204253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:25.210126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:25.257767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.359360Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:25.443102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.600510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.669617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.673395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706736389523900:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.673488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.673813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706736389523910:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.673860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:27.994402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.035377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.067866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.099694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.135890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.177101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.225217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.268205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.350498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706740684492074:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.350586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.350919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706740684492080:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.350947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.350944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706740684492079:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.355331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:28.368145Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706740684492083:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:28.467262Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706740684492136:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:29.348637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706723504620164:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:29.348709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:30.207936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... d=72075186224037930;tx_id=281474976710673;this=136427635740864;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630539;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.547431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7593706749274427128:2535];ev=NActors::IEventHandle;tablet_id=72075186224037932;tx_id=281474976710673;this=136427635743104;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630547;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.547970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7593706749274427113:2533];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976710673;this=136427636065216;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630547;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.548974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593706749274427130:2537];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976710673;this=136427636064544;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630548;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.549758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7593706749274427108:2530];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710673;this=136427636063648;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630549;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.550214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037929;self_id=[1:7593706749274427114:2534];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=136427636062528;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630549;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.552886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7593706749274427129:2536];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976710673;this=136427636060064;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630552;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.553009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7593706749274427109:2531];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=136427636612672;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630552;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.553421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7593706749274427229:2539];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976710673;this=136427636059840;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047630553;max=18446744073709551615;plan=0;src=[1:7593706723504620500:2151];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.563843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.563952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.563971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.564724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.564788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.564802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.592995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.593054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.593070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.595602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.595659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.595674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.601021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.601072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.601087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.604847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.604896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.604913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.611452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.611519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.611540Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.612518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.612558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.612570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.620831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.620886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.620903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.621455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.621492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:30.621506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command 
err: Trying to start YDB, gRPC: 61088, MsgBus: 6084 2026-01-10T12:20:26.220462Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706732676721638:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:26.220550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:26.421328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:26.442298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:26.442556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:26.497984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:26.501811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:26.503490Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706732676721611:2081] 1768047626218066 != 1768047626218069 2026-01-10T12:20:26.590723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:26.590753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:26.590761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:26.590848Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:26.627876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:26.969068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:27.017267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.173492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.289621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:27.347266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.428315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.293952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706745561625371:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.294077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.294381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706745561625381:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.294433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:29.693823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.729006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.761426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.796699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.831296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.868263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.935461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.980684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.065884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706749856593550:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.065982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.066280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706749856593556:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.066312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.066318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706749856593555:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.071304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:30.083333Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706749856593559:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:30.189816Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706749856593610:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:31.220170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706732676721638:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:31.220282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:31.819337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] >> TLocksTest::CK_Range_BrokenLockInf [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28441, MsgBus: 5306 2026-01-10T12:20:27.012104Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706736014546139:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:27.012338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:27.235976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:27.236072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:27.273260Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:27.287002Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:27.290043Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706736014546034:2081] 1768047627008446 != 1768047627008449 2026-01-10T12:20:27.303879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:27.387771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:27.387797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:27.387804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:27.387879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:27.434467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:27.813028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:27.820970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:27.867449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.006046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.018528Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:28.175364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.248734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.194508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706748899449805:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.194630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.195089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706748899449815:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.195171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.553314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.596873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.633672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.670814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.706576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.742120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.810875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.860652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.933399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706748899450685:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.933514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.933896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706748899450690:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.933937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706748899450691:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.934222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.938323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:30.951312Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706748899450694:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:31.005773Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706748899450745:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:32.011431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706736014546139:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:32.011508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:32.857907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls >> TLocksTest::BrokenDupLock [GOOD] >> TestProgram::CountWithNulls [GOOD] >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 13913, MsgBus: 14693 2026-01-10T12:20:27.379434Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706737383711175:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:27.382089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:27.403130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:27.627084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:27.654760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:27.654858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:27.684000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:27.697166Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:27.725546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706737383711146:2081] 1768047627377405 != 1768047627377408 2026-01-10T12:20:27.787963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:27.788009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:27.788018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:27.788150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:27.879821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:28.236716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:28.322241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.389791Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:28.447318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.620632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.696798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.761587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706750268614908:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.761731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.762512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706750268614918:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:30.762593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.127606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.162549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.199577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.234616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.265575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.328496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.404817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.455154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.558381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706754563583092:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.558433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.558597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706754563583097:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.558623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706754563583098:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.558641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.563286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:31.580278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706754563583101:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:31.655262Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706754563583152:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:32.378820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706737383711175:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:32.378899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:33.439849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... 931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:20:33.861207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:20:33.861237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:20:33.861279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:20:33.861288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7593706763153518185:2539];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976710673;this=137118861603328;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047633861;max=18446744073709551615;plan=0;src=[1:7593706737383711494:2145];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.861314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:20:33.861429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:20:33.861465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-10T12:20:33.861549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-10T12:20:33.861580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-10T12:20:33.861984Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7593706763153518127:2532];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710673;this=137118861604000;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047633861;max=18446744073709551615;plan=0;src=[1:7593706737383711494:2145];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.864394Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7593706763153518128:2533];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:458;TablesManager not ready=72075186224037927; 2026-01-10T12:20:33.864394Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7593706763153518199:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:458;TablesManager not ready=72075186224037932; 2026-01-10T12:20:33.871739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7593706763153518262:2541];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976710673;this=137118861597056;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1768047633871;max=18446744073709551615;plan=0;src=[1:7593706737383711494:2145];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.876530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.876627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.876656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.879514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.879577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.879593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.901818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.901881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 
2026-01-10T12:20:33.901896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.903627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.903686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.903703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.908415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.908471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.908486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.911894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.911968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.911985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.914653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.914729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.914745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.920833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.920915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.920933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.922083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.922143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.922158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.927621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.927678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-10T12:20:33.927694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 293813 table_access { name: "/Root/TestTable" reads { rows: 3 bytes: 108 } } cpu_time_us: 77687 } compilation { duration_us: 350839 cpu_time_us: 342512 } process_cpu_time_us: 446 total_duration_us: 647976 total_cpu_time_us: 420645 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch >> TSubscriberSinglePathUpdateTest::OneRingGroup >> TSubscriberSyncQuorumTest::TwoRingGroups >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberSinglePathUpdateTest::TwoRingGroups >> TSubscriberTest::SyncPartial >> TSubscriberSyncQuorumTest::OneRingGroup >> TSubscriberTest::NotifyDelete >> TSubscriberTest::NotifyUpdate >> TSubscriberTest::StrongNotificationAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"a\":true,\"i\":\"2\",\"p\":{\"function\":{\"function\":\"Count\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"a":true,"i":"2","p":{"function":{"function":"Count","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2026-01-10T12:19:53.690105Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706590711051134:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:53.690262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:19:53.722564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:53.967529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:53.970815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:53.970939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:54.009110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:54.081188Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:54.102685Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706590711050995:2081] 1768047593683768 != 1768047593683771 2026-01-10T12:19:54.164269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:54.269530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:54.350470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.487209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.538007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:54.692347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:57.164998Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706607008767963:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:57.165062Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.187483Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.303023Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:57.315858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:57.315944Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:57.318490Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:57.409037Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.534468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:57.543258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:57.553672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.622185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:57.677580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.745092Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706620259764405:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.745148Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.759491Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-10T12:20:00.841551Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:00.843379Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706620259764378:2081] 1768047600744077 != 1768047600744080 2026-01-10T12:20:00.860959Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:00.861025Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:00.864521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:01.028163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.047310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:01.053043Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:01.069390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.127099Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.177773Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.255880Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706639030972207:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:04.255968Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:04.272275Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.346436Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:04.347582Z node 4 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706639030972182:2081] 1768047604255347 != 1768047604255350 2026-01-10T12:20:04.386986Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.387074Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.396239Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:04.443657Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.608577Z node 4 ... HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:15.927535Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:15.985824Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:16.208014Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:16.213247Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:16.231551Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:16.296662Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:16.362311Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.388978Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706708773723694:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:20.389362Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:20.422589Z 
node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:20.526607Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:20.532227Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706708773723577:2081] 1768047620385137 != 1768047620385140 2026-01-10T12:20:20.567433Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:20.567535Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:20.581421Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:20.652198Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:20.891309Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.900458Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:20.913300Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.996664Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.061469Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.202211Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706730798902455:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:25.202406Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:25.320194Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:25.332344Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:25.332452Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:25.350106Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:25.383363Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:25.617797Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:25.728208Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:25.752934Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.823064Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.892616Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.215206Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:30.410755Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706752336300667:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:30.411405Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:30.437200Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:30.539098Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:30.539214Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-10T12:20:30.542219Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:30.580346Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:30.628277Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:30.905329Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:30.916436Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:30.933331Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.015430Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.088778Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSubscriberSinglePathUpdateTest::OneRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup >> TSubscriberSyncQuorumTest::TwoRingGroups [GOOD] >> TSubscriberTest::Boot >> TSubscriberSinglePathUpdateTest::TwoRingGroups [GOOD] >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch >> TSubscriberSyncQuorumTest::OneRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] Test command err: Trying to start YDB, gRPC: 7263, MsgBus: 8621 2026-01-10T12:20:29.653280Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706746854444329:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:29.653355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:29.900184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:29.927933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:29.928055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:29.991819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:29.994259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:29.997721Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706746854444288:2081] 1768047629650825 != 1768047629650828 2026-01-10T12:20:30.108992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:30.143568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:30.143593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:30.143600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:30.143682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:30.508013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:30.596458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.666560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:30.729783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.902602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.979993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.008926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706764034315349:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.009035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.009388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706764034315359:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.009430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.373147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.411280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.450799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.485843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.556908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.598186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.654580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.697246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.792775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706764034316238:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.792909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.793221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706764034316245:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.793230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706764034316244:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.793306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.797259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:33.810104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706764034316248:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:33.906578Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706764034316299:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:34.653606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706746854444329:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:34.653717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:35.466452Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:35.466572Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CCF26024E18 2026-01-10T12:20:35.466611Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706772624251186:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kekxkv099q2jbq18ee7688av, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2RiZjNhMjctY2Y1MTE0ZjYtMzBhZDM3N2ItNWVlMTRhOWU=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:35.466754Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:35.466799Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706772624251186:2531], queueSize: 1 2026-01-10T12:20:35.467240Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n 2026-01-10T12:20:35.467286Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706772624251186:2531], compileActor: [1:7593706772624251194:2536] 2026-01-10T12:20:35.467331Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:35.467411Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593706772624251194:2536], cluster: db, database: /Root, text: "\n 
SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-10T12:20:35.467298Z 2026-01-10T12:20:35.587052Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706772624251194:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047635","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"8f295cf2-c3822612-daae9d1c-e054eaf0","version":"1.0"} 2026-01-10T12:20:35.587586Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706772624251194:2536], duration: 0.120252s 2026-01-10T12:20:35.587627Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706772624251194:2536], owner: [1:7593706759739348014:2385], status: SUCCESS, issues: , uid: 8f295cf2-c3822612-daae9d1c-e054eaf0 2026-01-10T12:20:35.587729Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706772624251186:2531], status: SUCCESS, compileActor: [1:7593706772624251194:2536] 2026-01-10T12:20:35.587823Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706772624251186:2531], queryUid: 8f295cf2-c3822612-daae9d1c-e054eaf0, status:SUCCESS |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest 
|92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::InvalidNotification >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup >> TestProgram::YqlKernelStartsWithScalar >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest >> TestProgram::YqlKernelContains |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TLocksTest::GoodNullLock [GOOD] >> TestProgram::YqlKernelContains [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TSubscriberTest::Boot [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] >> TestProgram::YqlKernelStartsWith >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] Test command err: Trying to start YDB, gRPC: 25473, MsgBus: 24693 2026-01-10T12:20:30.161473Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706748949951126:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:30.162528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:30.397579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:30.412180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:30.412282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:30.497242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:30.571506Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:30.574295Z 
node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706748949951024:2081] 1768047630155000 != 1768047630155003 2026-01-10T12:20:30.634309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:30.664937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:30.664975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:30.664986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:30.665113Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:31.160273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:31.168878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:31.169449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:31.228270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.366012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.521203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.594400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.559441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706761834854787:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.559589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.560012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706761834854797:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.560092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.917114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.949856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.981991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.059022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.100275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.135741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.181521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.233658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.305979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706766129822965:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.306087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.306438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706766129822970:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.306525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706766129822971:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.306627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.310404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:34.322571Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706766129822974:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:34.410792Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706766129823025:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:35.160561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706748949951126:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:35.160688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:35.914363Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:35.914515Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C2D7F89CE28 2026-01-10T12:20:35.914558Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706770424790616:2531], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kekxkve9eh0svd89hv320vyf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODAzZGZjMTYtNzM4NzExZWMtYmYwMTRhODItYjU4MzlkOWQ=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:35.914725Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:35.914804Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706770424790616:2531], queueSize: 1 2026-01-10T12:20:35.915416Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706770424790616:2531], compileActor: [1:7593706770424790624:2536] 2026-01-10T12:20:35.915466Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:35.915521Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593706770424790624:2536], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-10T12:20:35.915437Z 2026-01-10T12:20:36.052167Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706770424790624:2536]]: Built the 
replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047636","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"95e2d394-c561ef02-2d3bf3e1-c5a47c60","version":"1.0"} 2026-01-10T12:20:36.052800Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706770424790624:2536], duration: 0.137343s 2026-01-10T12:20:36.052835Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706770424790624:2536], owner: [1:7593706761834854747:2384], status: SUCCESS, issues: , uid: 95e2d394-c561ef02-2d3bf3e1-c5a47c60 2026-01-10T12:20:36.052965Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706770424790616:2531], status: SUCCESS, compileActor: [1:7593706770424790624:2536] 2026-01-10T12:20:36.053037Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706770424790616:2531], queryUid: 95e2d394-c561ef02-2d3bf3e1-c5a47c60, status:SUCCESS >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TestProgram::YqlKernelStartsWith [GOOD] >> 
TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TSubscriberTest::Sync [GOOD] >> KqpCost::CTAS+isOlap [GOOD] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2026-01-10T12:19:55.155491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706602114945475:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:55.157035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:55.381132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.386280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:55.386362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:55.447434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:55.515300Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:55.517911Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706602114945454:2081] 1768047595150051 != 1768047595150054 2026-01-10T12:19:55.569380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:55.673507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:55.747931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.869500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:55.918116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.615397Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706615352097559:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.615991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:58.648056Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-10T12:19:58.723394Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:58.757018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:58.757084Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:58.761442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:58.840338Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.979947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.986550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:19:59.002843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.067532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.111481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.302893Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706632234027552:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:02.302959Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:02.318812Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.376632Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:02.383887Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706632234027521:2081] 1768047602301795 != 1768047602301798 
2026-01-10T12:20:02.416439Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:02.416523Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:02.421314Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:02.605682Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:02.615483Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:02.636161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.703353Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.758494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.739723Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706645351426247:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:05.739854Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:05.766100Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:05.827833Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:05.855417Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:05.855478Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:05.858312Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:06.019732Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:06.036773Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.070359Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.090069Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemesha ... State: Disconnected -> Connecting 2026-01-10T12:20:17.661068Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:17.860393Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:17.933449Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:17.939732Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:17.954481Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.029605Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.087237Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.109734Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706716681249505:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:22.109838Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:22.160274Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:22.239533Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:22.283820Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:22.283928Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:22.297028Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:22.426283Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:22.541836Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:22.548450Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:22.567691Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.648093Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.712250Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.045254Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706736254004924:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:27.045339Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:27.077274Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-10T12:20:27.165311Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:27.165424Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:27.168117Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:27.170614Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706736254004891:2081] 1768047627043957 != 1768047627043960 2026-01-10T12:20:27.201578Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:27.254088Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:27.501396Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:27.522456Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.588222Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.644044Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.947782Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706755713818997:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:31.947830Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:31.968795Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:32.075323Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:32.091021Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:32.091149Z node 10 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:32.092231Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706755713818969:2081] 1768047631946385 != 1768047631946388 2026-01-10T12:20:32.128791Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:32.234867Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:32.403395Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:32.409850Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:32.425995Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.501017Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.566461Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] Test command err: ... waiting for initial path lookups 2026-01-10T12:20:37.473656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.476662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:23:2066] 2026-01-10T12:20:37.476721Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:4:2051] Upsert description: path# TestPath 2026-01-10T12:20:37.476886Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:4:2051] Subscribe: subscriber# [1:23:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.477102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:24:2066] 2026-01-10T12:20:37.477132Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:5:2052] Upsert description: path# TestPath 2026-01-10T12:20:37.477189Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:5:2052] Subscribe: subscriber# [1:24:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.477318Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:25:2066] 2026-01-10T12:20:37.477360Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# TestPath 2026-01-10T12:20:37.477398Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:25:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.477464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-10T12:20:37.477525Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
replica.cpp:1098: [1:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:23:2066] 2026-01-10T12:20:37.477588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-10T12:20:37.477702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:24:2066] 2026-01-10T12:20:37.477740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.477787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:25:2066] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-10T12:20:37.478073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-10T12:20:37.478997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-10T12:20:37.479084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-10T12:20:37.479148Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-10T12:20:37.479250Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-10T12:20:37.479453Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12345 2026-01-10T12:20:37.479534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-10T12:20:37.479574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12345 2026-01-10T12:20:37.479613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-10T12:20:37.479724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-10T12:20:37.479775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-10T12:20:37.479936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-10T12:20:37.479990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.480056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-10T12:20:37.480129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.480201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-10T12:20:37.480235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-10T12:20:37.480292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-10T12:20:37.480342Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Updating cluster state generation on replica: [1:24339059:0] 2026-01-10T12:20:37.480472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-10T12:20:37.480640Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:1240: [1:4:2051] Handle 
{EvUpdateGroupConfig GroupConfig: empty BoardConfig: empty SchemeBoardConfig: {RingGroups# [0:{NToSelect# 3 Rings# [0:{[[1:24339059:0]]} 1:{[[1:1099535966835:0]]} 2:{[[1:2199047594611:0]]}}] StateStorageVersion# 0 CompatibleVersions# [] ClusterStateGeneration# 1 ClusterStateGuid# 0}} 2026-01-10T12:20:37.480776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-10T12:20:37.480829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12346 2026-01-10T12:20:37.480897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-10T12:20:37.480931Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12346 2026-01-10T12:20:37.480983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-01-10T12:20:37.481028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.481111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:4:2051], cookie# 12346 2026-01-10T12:20:37.481158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-01-10T12:20:37.481248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:20:2066], cookie# 12346 2026-01-10T12:20:37.481813Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:952: [main][1:19:2066][TestPath] Cluster State mismatch in sync version response: sender# [1:20:2066], cookie# 12346, subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-10T12:20:37.481869Z node 1 :SCHEME_BOARD_S ... [TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:38.034686Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-10T12:20:38.034760Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-10T12:20:38.034819Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:38.035069Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:18:2065], cookie# 12345 ... waiting for some sync responses 2026-01-10T12:20:38.035152Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:20:2066] 2026-01-10T12:20:38.035222Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:21:2066] 2026-01-10T12:20:38.035279Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.035335Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:22:2066] 2026-01-10T12:20:38.035410Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.035519Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:20:2066], cookie# 12345 2026-01-10T12:20:38.035596Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:21:2066], cookie# 12345 2026-01-10T12:20:38.035641Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:22:2066], cookie# 12345 2026-01-10T12:20:38.035717Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-10T12:20:38.035761Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 
2026-01-10T12:20:38.035801Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... waiting for some sync responses (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:38.036161Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:20:2066], cookie# 12345 2026-01-10T12:20:38.036238Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:38.036368Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:38.036419Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:1041: [main][3:19:2066][TestPath] Delay current sync request: 12345 ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:38.036913Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:21:2066], cookie# 12345 2026-01-10T12:20:38.036955Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:21:2066], cookie# 12345, current cookie# 0 2026-01-10T12:20:38.036997Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:22:2066], cookie# 12345 2026-01-10T12:20:38.037020Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:22:2066], cookie# 12345, current cookie# 0 2026-01-10T12:20:38.037496Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-10T12:20:38.037565Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-10T12:20:38.037602Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] 2026-01-10T12:20:38.037669Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:27:2066] 2026-01-10T12:20:38.037723Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:28:2066] 2026-01-10T12:20:38.037791Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.037843Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2066] 2026-01-10T12:20:38.037890Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.037967Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:27:2066], cookie# 12345 2026-01-10T12:20:38.038045Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:28:2066], cookie# 12345 2026-01-10T12:20:38.038125Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2066], cookie# 12345 2026-01-10T12:20:38.038194Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-10T12:20:38.038236Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-10T12:20:38.038303Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 2026-01-10T12:20:38.038397Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:27:2066], cookie# 12345 2026-01-10T12:20:38.038436Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:38.038502Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:28:2066], cookie# 12345 2026-01-10T12:20:38.038541Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:38.038593Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2066], cookie# 12345 2026-01-10T12:20:38.038619Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:29:2066], cookie# 12345, current cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-10T12:20:37.478990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.480466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-10T12:20:37.480531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-10T12:20:37.480573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-10T12:20:37.480762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-10T12:20:37.480859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-10T12:20:37.480921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-10T12:20:37.480968Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.481005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-10T12:20:37.481032Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.481138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-10T12:20:37.481212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-10T12:20:37.481236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-10T12:20:37.481317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-10T12:20:37.481352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-10T12:20:37.481441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-10T12:20:37.481482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.481521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-10T12:20:37.481551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.481594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-10T12:20:37.481616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-10T12:20:37.481655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-10T12:20:37.481700Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:1099535966835:0] whose ring group state is: 0 2026-01-10T12:20:37.481784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-10T12:20:37.481881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-10T12:20:37.481931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-10T12:20:37.481972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-01-10T12:20:37.482001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.482048Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2026-01-10T12:20:37.482134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2026-01-10T12:20:37.482157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-10T12:20:37.482185Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:21:2066], cookie# 12346 2026-01-10T12:20:37.482208Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-10T12:20:37.482234Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:19:2066][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-10T12:20:37.482275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-10T12:20:37.482309Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-10T12:20:37.752509Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[2:3298559222387:0], [2:4398070850163:0], [2:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-10T12:20:37.753554Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-10T12:20:37.753633Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-10T12:20:37.753688Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] 2026-01-10T12:20:37.753723Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:13:2060] 2026-01-10T12:20:37.753759Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:14:2061] 2026-01-10T12:20:37.753799Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie ... 
p# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.757757Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.757792Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [2:32:2075], cookie# 12346 2026-01-10T12:20:37.757818Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.757844Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-10T12:20:37.757887Z node 2 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][2:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-10T12:20:37.757964Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:32:2075] 2026-01-10T12:20:37.758030Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.758098Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:33:2075], cookie# 12346 2026-01-10T12:20:37.758129Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:28:2075][TestPath] Sync cookie mismatch: sender# [2:33:2075], cookie# 12346, current cookie# 0 ... waiting for initial path lookups 2026-01-10T12:20:38.044394Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-10T12:20:38.045105Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-10T12:20:38.045180Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-10T12:20:38.045230Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-10T12:20:38.045541Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-10T12:20:38.045651Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-10T12:20:38.045772Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-10T12:20:38.045842Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.045934Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-10T12:20:38.045981Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.046095Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-10T12:20:38.046180Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-10T12:20:38.046221Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-10T12:20:38.046330Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-10T12:20:38.046396Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-10T12:20:38.046557Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-10T12:20:38.046604Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:38.046657Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-01-10T12:20:38.046692Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:38.046773Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-10T12:20:38.046814Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-10T12:20:38.046871Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-10T12:20:38.046924Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-10T12:20:38.047076Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-10T12:20:38.047234Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-10T12:20:38.047303Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-10T12:20:38.047381Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-10T12:20:38.047442Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:38.047536Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-10T12:20:38.047609Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-01-10T12:20:38.047646Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-10T12:20:38.047725Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-10T12:20:38.047798Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-10T12:20:38.047874Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.047931Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-10T12:20:38.047960Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: ... waiting for initial path lookups 2026-01-10T12:20:37.473689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-10T12:20:37.478296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-10T12:20:37.478397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-10T12:20:37.478444Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.478482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-10T12:20:37.478536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-10T12:20:37.478573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] Poisoning replica: [1:5497582477939:0] 2026-01-10T12:20:37.478956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12345 2026-01-10T12:20:37.479080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-10T12:20:37.479169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-10T12:20:37.479215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-10T12:20:37.479249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-10T12:20:37.479293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-10T12:20:37.479401Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-10T12:20:37.479520Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12345 2026-01-10T12:20:37.479730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12345 2026-01-10T12:20:37.479786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:31:2075], cookie# 12345 2026-01-10T12:20:37.479867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12345 2026-01-10T12:20:37.479921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12345 2026-01-10T12:20:37.479977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:34:2075], cookie# 12345 2026-01-10T12:20:37.480128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-10T12:20:37.480188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-10T12:20:37.480358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12345 2026-01-10T12:20:37.480407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12345 2026-01-10T12:20:37.480551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12345 2026-01-10T12:20:37.480619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.480682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12345 2026-01-10T12:20:37.480725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.480756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-10T12:20:37.480853Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12345 2026-01-10T12:20:37.480883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.480921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-10T12:20:37.480960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-10T12:20:37.481015Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.481065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12345 2026-01-10T12:20:37.481104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.481131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.481176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12345 2026-01-10T12:20:37.481204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.481259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.481315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12345 2026-01-10T12:20:37.481344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:28:2075][TestPath] Sync cookie mismatch: sender# [1:34:2075], cookie# 12345, current cookie# 0 2026-01-10T12:20:37.481431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-10T12:20:37.481476Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:1099535966835:0] whose 
ring group state is: 0 2026-01-10T12:20:37.481590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12346 2026-01-10T12:20:37.481752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12346 2026-01-10T12:20:37.481825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12346 2026-01-10T12:20:37.481893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12346 2026-01-10T12:20:37.481938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.481973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12346 2026-01-10T12:20:37.482027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12346 2026-01-10T12:20:37.482074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12346 2026-01-10T12:20:37.482104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.482176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2026-01-10T12:20:37.482267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12346 2026-01-10T12:20:37.482299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12346 2026-01-10T12:20:37.482358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12346 2026-01-10T12:20:37.482382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-10T12:20:37.482412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:30:2075], cookie# 12346 2026-01-10T12:20:37.482456Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-10T12:20:37.482493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.482532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-10T12:20:37.482581Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.482658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12346 2026-01-10T12:20:37.482704Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-10T12:20:37.482737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-01-10T12:20:37.482771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12346 2026-01-10T12:20:37.482797Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-10T12:20:37.482820Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-01-10T12:20:37.482842Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-10T12:20:37.736211Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.736889Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:3:2050] 2026-01-10T12:20:37.736988Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:6:2053] 2026-01-10T12:20:37.737040Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:9:2056] 2026-01-10T12:20:37.737100Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2066] 
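The per-ring-group Sync counting traced earlier in this block (cookie# 12345 and cookie# 12346) follows a simple majority rule: with size# 3 and half# 1, a ring group is reported "Sync is done" once either successes or failures exceed half, and the request ends with "Sync is incomplete in one of the ring groups" if any group failed to reach a success majority. A minimal C++ sketch of that counting rule, using hypothetical names rather than the actual subscriber.cpp types, is shown below; the values in main() are taken from the trace above.

#include <cstddef>
#include <iostream>
#include <vector>

// One ring group's reply counters, mirroring the "size# / half# / successes# / failures#"
// fields printed by the subscriber in the trace above.
struct TRingGroupSync {
    size_t Size;       // replicas in the ring group (3 in this trace)
    size_t Successes;  // non-partial TEvSyncVersionResponse replies
    size_t Failures;   // partial or missing replies
};

// A group is decided once one side holds a strict majority (> Size / 2, i.e. more than "half# 1").
bool IsDone(const TRingGroupSync& g) {
    return g.Successes > g.Size / 2 || g.Failures > g.Size / 2;
}

// The sync as a whole succeeds only if every ring group reached a success majority;
// any group decided by failures leads to the "Sync is incomplete" warning in the log.
bool SyncSucceeded(const std::vector<TRingGroupSync>& groups) {
    for (const auto& g : groups) {
        if (!IsDone(g) || g.Successes <= g.Size / 2) {
            return false;
        }
    }
    return true;
}

int main() {
    // cookie# 12345: group 0 ended with successes# 2, failures# 1; group 1 with successes# 2, failures# 0.
    std::vector<TRingGroupSync> ok{{3, 2, 1}, {3, 2, 0}};
    // cookie# 12346: group 0 ended with successes# 1, failures# 2 -> sync is incomplete.
    std::vector<TRingGroupSync> incomplete{{3, 1, 2}, {3, 2, 1}};
    std::cout << SyncSucceeded(ok) << " " << SyncSucceeded(incomplete) << "\n";  // prints "1 0"
    return 0;
}

This is an editorial illustration of the counting visible in the log, not the YDB implementation; the actual subscriber also tracks partial responses and cookie matching, as the surrounding trace shows.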
2026-01-10T12:20:37.737164Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:38:2066] 2026-01-10T12:20:37.737208Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:36:2066][path] Set up state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.737254Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:39:2066] 2026-01-10T12:20:37.737294Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:36:2066][path] Ignore empty state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-10T12:20:37.476327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.479266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-10T12:20:37.479381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-10T12:20:37.479460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:37.479770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-10T12:20:37.479871Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-10T12:20:37.479944Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-10T12:20:37.480046Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-01-10T12:20:37.480426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-10T12:20:37.480506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:20:2066] 2026-01-10T12:20:37.480564Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Update to strong state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-10T12:20:37.480882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-10T12:20:37.480945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:21:2066] 2026-01-10T12:20:37.481015Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-10T12:20:37.481234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [1:6:2053] 2026-01-10T12:20:37.481291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:22:2066] 2026-01-10T12:20:37.481347Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-10T12:20:37.749645Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.750316Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-10T12:20:37.750396Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-10T12:20:37.750443Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:37.750743Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:29:2075] 2026-01-10T12:20:37.750820Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2026-01-10T12:20:37.750874Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:28:2075][TestPath] Set up state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.750919Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:31:2075] 2026-01-10T12:20:37.750960Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:24339059:0] 2026-01-10T12:20:37.751265Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:4:2051] 2026-01-10T12:20:37.751353Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:29:2075] 2026-01-10T12:20:37.751418Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Update to strong state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:1099535966835:0] 2026-01-10T12:20:37.751726Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:5:2052] 2026-01-10T12:20:37.751783Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:30:2075] 2026-01-10T12:20:37.751855Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:2199047594611:0] 2026-01-10T12:20:37.752106Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [2:6:2053] 2026-01-10T12:20:37.752168Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:31:2075] 2026-01-10T12:20:37.752216Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:3298559222387:0] Sending path update to replica: [2:4398070850163:0] Sending path update to replica: [2:5497582477939:0] ... waiting for initial path lookups 2026-01-10T12:20:38.031955Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-10T12:20:38.032679Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-10T12:20:38.032754Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-10T12:20:38.032792Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:38.033050Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-10T12:20:38.033135Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-10T12:20:38.033184Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.033234Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-10T12:20:38.033270Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:24339059:0] 2026-01-10T12:20:38.033585Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:4:2051] 2026-01-10T12:20:38.033659Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:29:2075] 2026-01-10T12:20:38.033719Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Update to strong state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:1099535966835:0] 2026-01-10T12:20:38.034004Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:5:2052] 2026-01-10T12:20:38.034066Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:30:2075] 2026-01-10T12:20:38.034121Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:2199047594611:0] 2026-01-10T12:20:38.034330Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [3:6:2053] 2026-01-10T12:20:38.034388Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [3:31:2075] 2026-01-10T12:20:38.034450Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:3298559222387:0] Sending path update to replica: [3:4398070850163:0] Sending path update to replica: [3:5497582477939:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-10T12:20:37.473645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-10T12:20:37.479114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-10T12:20:37.479239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-10T12:20:37.479285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.479326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-10T12:20:37.479373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-10T12:20:37.479412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-10T12:20:37.479840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-10T12:20:37.479992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-10T12:20:37.480029Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-10T12:20:37.480069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-10T12:20:37.482252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-10T12:20:37.482323Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.482399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-10T12:20:37.482456Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-01-10T12:20:37.482796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-10T12:20:37.482887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:29:2075] 2026-01-10T12:20:37.482947Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Update to strong state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-10T12:20:37.483264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-10T12:20:37.483339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:30:2075] 2026-01-10T12:20:37.483408Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new 
version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-10T12:20:37.483656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-01-10T12:20:37.483718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:31:2075] 2026-01-10T12:20:37.483768Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:3298559222387:0] 2026-01-10T12:20:37.484032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:13:2060] 2026-01-10T12:20:37.484103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:32:2075] 2026-01-10T12:20:37.484240Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:4398070850163:0] 2026-01-10T12:20:37.484491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:14:2061] 2026-01-10T12:20:37.484567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:33:2075] 2026-01-10T12:20:37.484620Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:5497582477939:0] 2026-01-10T12:20:37.484871Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: 
[OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:15:2062] 2026-01-10T12:20:37.484950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:34:2075] 2026-01-10T12:20:37.485045Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 6) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-10T12:20:37.746271Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.746851Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [2:23:2066] 2026-01-10T12:20:37.746918Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:4:2051] Upsert description: path# TestPath 2026-01-1 ... Update { Owner: 1 Generation: 1 }: sender# [2:18:2065], cookie# 0, event size# 80 2026-01-10T12:20:37.756714Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:4:2051] Update description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:20:37.756772Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:4:2051] Upsert description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path TestPath, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 34} 2026-01-10T12:20:37.756898Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:4:2051] 2026-01-10T12:20:37.756953Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:23:2066] 2026-01-10T12:20:37.757065Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:520: [proxy][2:20:2066][TestPath] Cluster state mismatch in replica notification: sender# [2:23:2066], subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-10T12:20:37.757205Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:4:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: TestPath }: sender# [2:23:2066] 2026-01-10T12:20:37.757266Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:4:2051] Unsubscribe: subscriber# [2:23:2066], path# TestPath 2026-01-10T12:20:37.757328Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:20:2066] 2026-01-10T12:20:37.757398Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:19:2066][TestPath] Ignore empty state: owner# [2:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# 
{ Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-10T12:20:38.025251Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:38.025786Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-10T12:20:38.025857Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-10T12:20:38.025896Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-10T12:20:38.026108Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-10T12:20:38.026219Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-10T12:20:38.026322Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-10T12:20:38.026387Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.026424Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-10T12:20:38.026449Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.026546Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-10T12:20:38.026605Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-10T12:20:38.026671Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-10T12:20:38.026735Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-10T12:20:38.026774Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-10T12:20:38.026931Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-10T12:20:38.026990Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:38.027031Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# 
[3:30:2075], cookie# 12345 2026-01-10T12:20:38.027068Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:38.027123Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-10T12:20:38.027148Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-10T12:20:38.027190Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-10T12:20:38.027235Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-10T12:20:38.027374Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-10T12:20:38.027535Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-10T12:20:38.027591Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-10T12:20:38.027651Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-10T12:20:38.027698Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:38.027776Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-10T12:20:38.027852Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-01-10T12:20:38.027900Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-10T12:20:38.027929Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-10T12:20:38.027988Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-10T12:20:38.028032Z 
node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:38.028070Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-10T12:20:38.028122Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2026-01-10T12:20:37.474087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.477907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-10T12:20:37.478013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.478058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-10T12:20:37.478135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-10T12:20:37.478209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-10T12:20:37.478259Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.478324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-10T12:20:37.478382Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.478622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 1 2026-01-10T12:20:37.479006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2026-01-10T12:20:37.479073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], 
cookie# 1 2026-01-10T12:20:37.479126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 1 2026-01-10T12:20:37.479285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:6:2053], cookie# 1 2026-01-10T12:20:37.479337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 1 2026-01-10T12:20:37.479421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 1 2026-01-10T12:20:37.479474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.479529Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-10T12:20:37.479590Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:38:2066], cookie# 1 2026-01-10T12:20:37.479698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-10T12:20:37.479732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 1 2026-01-10T12:20:37.479796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:36:2066][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-10T12:20:37.479945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 2 2026-01-10T12:20:37.480054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 2 2026-01-10T12:20:37.480103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.480139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 2 2026-01-10T12:20:37.480186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 2 2026-01-10T12:20:37.480309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 2 2026-01-10T12:20:37.480393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 2 2026-01-10T12:20:37.480431Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-10T12:20:37.480469Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 2 2026-01-10T12:20:37.480557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-10T12:20:37.480614Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.480660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 2 2026-01-10T12:20:37.480703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 2, current cookie# 0 2026-01-10T12:20:37.480810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 3 2026-01-10T12:20:37.480936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 3 2026-01-10T12:20:37.480969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-10T12:20:37.481019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 3 2026-01-10T12:20:37.481064Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-10T12:20:37.481094Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 3 2026-01-10T12:20:37.481145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 3 2026-01-10T12:20:37.481262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:39:2066], cookie# 3 2026-01-10T12:20:37.481288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 3, current cookie# 0 2026-01-10T12:20:37.481333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-10T12:20:37.481378Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.958362Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.959091Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:3:2050] 2026-01-10T12:20:37.959196Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:6:2053] 2026-01-10T12:20:37.959245Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:9:2056] 2026-01-10T12:20:37.959314Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:38:2067] 2026-01-10T12:20:37.959408Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:39:2067] 2026-01-10T12:20:37.959490Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.959614Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:40:2067] 2026-01-10T12:20:37.959677Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.959852Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-10T12:20:37.959982Z 
node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-10T12:20:37.960097Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-10T12:20:37.960150Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-10T12:20:37.960246Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-10T12:20:37.960300Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-10T12:20:37.960339Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-10T12:20:37.960401Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-10T12:20:37.960466Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.960512Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-10T12:20:37.960549Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.960602Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-10T12:20:37.960630Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2026-01-10T12:20:37.499958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.501989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-10T12:20:37.502088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2026-01-10T12:20:37.502131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2026-01-10T12:20:37.502226Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2067] 2026-01-10T12:20:37.502286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:39:2067] 2026-01-10T12:20:37.502337Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:37:2067][path] Set up state: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.502431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:40:2067] 2026-01-10T12:20:37.502515Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.502880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2026-01-10T12:20:37.502960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2026-01-10T12:20:37.503019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2067] 2026-01-10T12:20:37.503109Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:37:2067][path] Path was updated to new version: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.503195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:39:2067] 2026-01-10T12:20:37.503250Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.961981Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.962598Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-10T12:20:37.962645Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-10T12:20:37.962672Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-10T12:20:37.962715Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2066] 2026-01-10T12:20:37.962760Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:38:2066] 2026-01-10T12:20:37.962808Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:36:2066][path] Set up state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.962855Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:39:2066] 2026-01-10T12:20:37.962902Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:36:2066][path] Ignore empty state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.963026Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] 2026-01-10T12:20:37.963061Z node 3 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:818: [main][3:36:2066][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2026-01-10T12:20:37.473744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.477580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-10T12:20:37.477685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.477730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-10T12:20:37.477803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-10T12:20:37.477938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-10T12:20:37.477984Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.478042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-10T12:20:37.478092Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.479616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-10T12:20:37.479701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2026-01-10T12:20:37.479756Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.965638Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][4:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.967581Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-10T12:20:37.967656Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-10T12:20:37.967722Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-10T12:20:37.967921Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-10T12:20:37.967990Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 
2026-01-10T12:20:37.968042Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:36:2066][path] Set up state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.968148Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-10T12:20:37.968207Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.969557Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-10T12:20:37.969636Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.969680Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-10T12:20:37.969739Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.969783Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-10T12:20:37.969830Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.981068Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-10T12:20:37.981199Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-10T12:20:37.981268Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.981308Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:48:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-10T12:20:37.981368Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 
2026-01-10T12:20:37.981390Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.981462Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:49:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-10T12:20:37.981526Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-10T12:20:37.981559Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.982030Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-10T12:20:37.982106Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [4:37:2066] 2026-01-10T12:20:37.982156Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][4:36:2066][path] Update to strong state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2026-01-10T12:20:37.497853Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.500467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-10T12:20:37.500589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.500638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-10T12:20:37.500733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-10T12:20:37.500815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-10T12:20:37.500862Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.500921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-10T12:20:37.500974Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.502182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-10T12:20:37.502279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-10T12:20:37.502338Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.502531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.502605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-10T12:20:37.502654Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.961903Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.962508Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-10T12:20:37.962571Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:6:2053] 2026-01-10T12:20:37.962608Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:9:2056] 2026-01-10T12:20:37.962666Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:38:2067] 2026-01-10T12:20:37.962715Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 
1, LocalPathId: 1] Version: 1 }: sender# [3:39:2067] 2026-01-10T12:20:37.962774Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.962898Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:40:2067] 2026-01-10T12:20:37.962952Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.963084Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-10T12:20:37.963172Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-10T12:20:37.963240Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-10T12:20:37.963278Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-10T12:20:37.963429Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-10T12:20:37.963485Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-10T12:20:37.963514Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-10T12:20:37.963586Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-10T12:20:37.963655Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:20:37.963697Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-10T12:20:37.963741Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:20:37.963821Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-10T12:20:37.963846Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 >> KqpCompileFallback::FallbackWithScanQuery [GOOD] >> TestProgram::JsonExistsBinary [GOOD] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:52:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:54:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13 ... recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:52:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:54:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:54:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:82:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:85:2057] recipient: [63:84:2115] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:87:2057] recipient: [63:84:2115] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:86:2116] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:202:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:82:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:85:2057] recipient: [64:84:2115] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:87:2057] recipient: [64:84:2115] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:86:2116] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:202:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:83:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:86:2057] recipient: [65:85:2115] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:88:2057] recipient: [65:85:2115] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:87:2116] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:203:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:84:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:87:2057] recipient: [66:86:2116] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:89:2057] recipient: [66:86:2116] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:88:2117] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:108:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:109:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:88:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:91:2057] recipient: [68:90:2120] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:93:2057] recipient: [68:90:2120] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:92:2121] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:208:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] !Reboot 72057594037927937 (actor [69:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:88:2057] recipient: [69:39:2086] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:91:2057] recipient: [69:90:2120] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:93:2057] recipient: [69:90:2120] !Reboot 72057594037927937 (actor [69:58:2099]) rebooted! !Reboot 72057594037927937 (actor [69:58:2099]) tablet resolver refreshed! new actor is[69:92:2121] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:208:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:52:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:59:2057] recipient: [70:52:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:76:2057] recipient: [70:14:2061] !Reboot 72057594037927937 (actor [70:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:89:2057] recipient: [70:39:2086] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:92:2057] recipient: [70:91:2120] Leader for TabletID 72057594037927937 is [70:93:2121] sender: [70:94:2057] recipient: [70:91:2120] !Reboot 72057594037927937 (actor [70:58:2099]) rebooted! !Reboot 72057594037927937 (actor [70:58:2099]) tablet resolver refreshed! 
new actor is[70:93:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:59:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:76:2057] recipient: [71:14:2061] |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] Test command err: Trying to start YDB, gRPC: 15803, MsgBus: 23886 2026-01-10T12:20:31.340360Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706753422412847:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:31.341377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:31.553976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:31.646987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:31.647114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:31.670794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:31.672228Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706753422412821:2081] 1768047631337750 != 1768047631337753 2026-01-10T12:20:31.682246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:31.743352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:31.746173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:31.746198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:31.746206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:31.746289Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:32.183879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:32.195067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:32.232952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.348985Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:32.376359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.540537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.608536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.666051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706766307316591:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.666175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.666574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706766307316601:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:34.666638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.012972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.042375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.071562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.102226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.128492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.168688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.200131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.247199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.314311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706770602284764:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.314391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.314477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706770602284770:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.314605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706770602284772:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.314648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.318354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:35.328942Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706770602284774:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:35.427140Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706770602284827:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:36.339586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706753422412847:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:36.339689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:37.002342Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:37.002421Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C7B837226B8 2026-01-10T12:20:37.002448Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706774897252417:2531], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", keepInCache: 1, split: 0{ TraceId: 01kekxkwg91e6hjwqjcm7t9rze, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkZThhNjEtZmVkNWI1ZjctMmQ2NDc2MzQtZGMzZGU2YzY=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:37.002581Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:37.002627Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706774897252417:2531], queueSize: 1 2026-01-10T12:20:37.003228Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n 2026-01-10T12:20:37.003264Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706774897252417:2531], compileActor: [1:7593706779192219721:2536] 2026-01-10T12:20:37.003304Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:37.003339Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593706779192219721:2536], cluster: db, database: /Root, text: "\n 
SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", startTime: 2026-01-10T12:20:37.003294Z 2026-01-10T12:20:37.023877Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7593706779192219721:2536], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n " 2026-01-10T12:20:37.197164Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706779192219721:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047637","query_text":"\\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"93c87880-79508845-653a6606-c2f8d54d","version":"1.0"} 2026-01-10T12:20:37.197649Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706779192219721:2536], duration: 0.194329s 2026-01-10T12:20:37.197681Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706779192219721:2536], owner: [1:7593706766307316550:2383], status: SUCCESS, issues: , uid: 93c87880-79508845-653a6606-c2f8d54d 2026-01-10T12:20:37.197836Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706774897252417:2531], status: SUCCESS, compileActor: [1:7593706779192219721:2536] 2026-01-10T12:20:37.198113Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, 
QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:37.198266Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706774897252417:2531], queryUid: 93c87880-79508845-653a6606-c2f8d54d, status:SUCCESS 2026-01-10T12:20:37.218052Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1223: Served query from cache by uid, sender: [1:7593706774897252417:2531], queryUid: 93c87880-79508845-653a6606-c2f8d54d >> KqpCompileFallback::FallbackMechanismWorks [GOOD] >> TLocksTest::CK_BrokenLock [GOOD] >> KqpCost::CTASWithRetry+isOlap [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] >> KqpCost::CTASWithRetry-isOlap [GOOD] >> TestProgram::NumRowsWithNulls >> TestProgram::YqlKernelEquals ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::NumRowsWithNulls [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists >> TestProgram::YqlKernelEquals [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 7400, MsgBus: 65051 2026-01-10T12:20:27.848295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706739029316905:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:27.848369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:28.124192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:28.126022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:28.126161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:28.150770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:28.222791Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706739029316875:2081] 1768047627846591 != 1768047627846594 2026-01-10T12:20:28.230692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:28.294577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:28.294606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:28.294616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:28.294731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:28.403872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:28.725187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:28.742777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:28.802569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.862154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:28.943847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.106009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.175373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.190569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706756209187936:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.190692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.191157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706756209187946:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.191255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.561359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.607631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.640193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.673655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.707351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.746817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.780420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.827156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.907901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706756209188817:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.907991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.908345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706756209188822:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.908387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706756209188823:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.908535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:31.912491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:31.924919Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706756209188826:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:31.987059Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706756209188877:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:32.848602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706739029316905:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:32.848723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:33.684636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157714Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157729Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157751Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157772Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157781Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157812Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157814Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157840Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157856Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 
281474976710680 2026-01-10T12:20:37.157877Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157900Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157922Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157955Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157970Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.157996Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037939 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158015Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158033Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037937 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158080Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158105Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158141Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158153Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158191Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158195Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158238Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158238Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158282Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158284Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158326Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158329Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158376Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158379Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158419Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158441Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158461Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158486Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158505Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158534Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158547Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158577Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158591Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-10T12:20:37.158624Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 query_phases { duration_us: 171078 table_access { name: "/Root/.tmp/sessions/783133bf-49cf-77a7-7e03-9daa2764fe38/Root/TestTable2_9d3322da-4a97-7392-168e-9e909d8288da" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 17954 } compilation { duration_us: 9941 cpu_time_us: 5903 } process_cpu_time_us: 1086 total_duration_us: 2930115 total_cpu_time_us: 24943 2026-01-10T12:20:37.176506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:20:37.182299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2026-01-10T12:19:55.847274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706598475574634:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:55.847355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:55.871423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:56.093720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.099264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-10T12:19:56.099435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:56.123615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:56.209164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:56.211150Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706598475574530:2081] 1768047595843535 != 1768047595843538 2026-01-10T12:19:56.283805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:56.397623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:56.479195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.642943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:56.697693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.258708Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706615644152958:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:59.259943Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:59.286499Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.363948Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:59.411692Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:59.411787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:59.413824Z node 2 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:59.457075Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:59.581680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:59.598719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.651002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.695339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:02.891878Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706632442861017:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:02.892602Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:02.955901Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.015001Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:03.016507Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706632442860903:2081] 1768047602881850 != 1768047602881853 2026-01-10T12:20:03.054489Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:03.054572Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:03.057424Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:03.218122Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-10T12:20:03.236215Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.240883Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:03.259450Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.336618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.381335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.679938Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706646786304773:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:06.679997Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:06.704207Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.789915Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:06.823672Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:06.823755Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:06.835367Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:06.923870Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:07.016152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:07.040000Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... 0: Table profiles were not loaded 2026-01-10T12:20:18.770906Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:18.857763Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:19.004697Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:19.023996Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.109041Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:19.168133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.317443Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706719610708375:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:23.317516Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:23.333532Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:23.423523Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:23.425499Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706719610708348:2081] 1768047623316259 != 1768047623316262 2026-01-10T12:20:23.478020Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:23.478134Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:20:23.484578Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:23.553444Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:23.719775Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:23.744390Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.822648Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:23.875607Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.959137Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706738855603288:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:27.960395Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:28.091926Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:28.100455Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:28.133013Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:28.133122Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:28.144983Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:28.290943Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:28.424861Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:28.441717Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:28.454472Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:20:28.462995Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.531643Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.664836Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.991713Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706757916000629:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:32.991923Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:33.016548Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:33.123946Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706757916000591:2081] 1768047632989649 != 1768047632989652 2026-01-10T12:20:33.125385Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:33.130213Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:33.130323Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:33.135399Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:33.239900Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:33.455573Z node 10 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:33.461903Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:33.483468Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.625980Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.684371Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TestProgram::JsonExists [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.5%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, 
label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 7259, MsgBus: 10338 2026-01-10T12:20:33.086360Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706762677419890:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:33.086422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:33.373577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:33.373720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:33.376053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:33.445480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:33.453398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:20:33.465792Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:20:33.465815Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:20:33.524990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:33.525561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:33.525577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:33.525705Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:33.717740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:33.973270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:33.983833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:34.037098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.108228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:34.159330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.333284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.402313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.255439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706775562323529:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.255573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.255906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706775562323539:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.255997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.544786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.572286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.601721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.630442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.659293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.691738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.740683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.780562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.853294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706775562324406:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.853387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.853655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706775562324412:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.853677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706775562324411:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.853700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.857881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:36.868941Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706775562324415:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:36.925989Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706775562324466:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:38.085886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706762677419890:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:38.085984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:38.326655Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.326772Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C2779A0B378 2026-01-10T12:20:38.326810Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706784152259353:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", keepInCache: 1, split: 0{ TraceId: 01kekxkxsn8gvveqtwn8rg6zw8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2NWVlNzQtNDMyOWRjY2EtZWJmMTU1NmMtZjVlNzYxMTM=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:38.326970Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.327012Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706784152259353:2531], queueSize: 1 2026-01-10T12:20:38.327441Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n 2026-01-10T12:20:38.327497Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706784152259353:2531], compileActor: [1:7593706784152259362:2537] 2026-01-10T12:20:38.327511Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:38.327551Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: 
[1:7593706784152259362:2537], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", startTime: 2026-01-10T12:20:38.327495Z 2026-01-10T12:20:38.444036Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706784152259362:2537]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047638","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (0, +∞)\"],\"type\":\"Scan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"a3f6849b-963d3ffc-1c7b1f8c-53983266","version":"1.0"} 2026-01-10T12:20:38.444559Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706784152259362:2537], duration: 0.117038s 2026-01-10T12:20:38.444611Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706784152259362:2537], owner: [1:7593706775562323488:2383], status: SUCCESS, issues: , uid: a3f6849b-963d3ffc-1c7b1f8c-53983266 2026-01-10T12:20:38.444836Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706784152259353:2531], status: SUCCESS, compileActor: [1:7593706784152259362:2537] 2026-01-10T12:20:38.445063Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" 
}, "rollback_settings": { } } }} 2026-01-10T12:20:38.445185Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706784152259353:2531], queryUid: a3f6849b-963d3ffc-1c7b1f8c-53983266, status:SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N5(8):{\"a\":true,\"p\":{\"function\":{\"function\":\"NumRows\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"a":true,"p":{"function":{"function":"NumRows","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Filter; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] Test command err: Trying to start YDB, gRPC: 29792, MsgBus: 2166 2026-01-10T12:20:33.154401Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706763697358022:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:33.155145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:33.421727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:33.422020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:33.450693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:33.470786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:33.513124Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:33.603610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:33.603637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:33.603649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:33.603761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:33.734960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:34.070741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:34.081887Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:34.136807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.160297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:34.294743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.438116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.519927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.318253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776582261731:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.318379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.318714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776582261741:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.318770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.617814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.645946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.673254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.701683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.734243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.767479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.798181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.868671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.937884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776582262610:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.937951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.938060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776582262615:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.938063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776582262617:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.938122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.941540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:36.951706Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706776582262619:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:37.036314Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706780877229966:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:38.147462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706763697358022:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:38.147555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:38.474527Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.474651Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C0274B23F18 2026-01-10T12:20:38.474689Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706785172197558:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kekxkxy9aa3s75pq4wcng6fn, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2E3YzdiMmUtMTY2MTIwMjctZjc4ZTZlNDItZjc5YTJhNmY=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:38.474851Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.474898Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706785172197558:2531], queueSize: 1 2026-01-10T12:20:38.475491Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706785172197558:2531], compileActor: [1:7593706785172197566:2536] 2026-01-10T12:20:38.475514Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:38.475553Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593706785172197566:2536], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-10T12:20:38.475486Z 2026-01-10T12:20:38.592927Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7593706785172197566:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047638","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"1c837089-de47d548-3ab1ac50-ab70c724","version":"1.0"} 2026-01-10T12:20:38.593355Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706785172197566:2536], duration: 0.117849s 2026-01-10T12:20:38.593380Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706785172197566:2536], owner: [1:7593706776582261691:2384], status: SUCCESS, issues: , uid: 1c837089-de47d548-3ab1ac50-ab70c724 2026-01-10T12:20:38.593485Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7593706785172197558:2531], status: SUCCESS, compileActor: [1:7593706785172197566:2536] 2026-01-10T12:20:38.593564Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706785172197558:2531], queryUid: 1c837089-de47d548-3ab1ac50-ab70c724, status:SUCCESS |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest 
>> KqpCompileFallback::FallbackMechanismWorks [GOOD] Test command err: Trying to start YDB, gRPC: 62455, MsgBus: 11541 2026-01-10T12:20:32.981494Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706759564446431:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:32.981572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:33.252573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:33.287695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:33.287812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:33.344266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706759564446404:2081] 1768047632979586 != 1768047632979589 2026-01-10T12:20:33.359100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:33.365941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:33.488502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:33.523662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:33.523691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:33.523698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:33.523801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:33.985755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:33.989886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:33.991711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:34.049324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.202694Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.344151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.424611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.259745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776744317462:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.259886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.260248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776744317472:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.260324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.539882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.565725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.591240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.617397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.645235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.675825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.708377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.761545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.884476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776744318350:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.884595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.884833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776744318356:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.884855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706776744318355:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.884921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:36.889034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:36.900180Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706776744318359:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:36.981602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706776744318410:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:37.981808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706759564446431:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:37.983093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:38.286123Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.286232Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C34DA537928 2026-01-10T12:20:38.286267Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7593706785334253297:2531], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kekxkxrd760k9ysqbsh73vdn, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OThjZmYyYTItNjA2OWRmNWMtYWE5ODVjN2MtNmJhYWVmMzQ=, PoolId: default, IsStreamingQuery: 0} 2026-01-10T12:20:38.286431Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-10T12:20:38.286481Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7593706785334253297:2531], queueSize: 1 2026-01-10T12:20:38.287031Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n 2026-01-10T12:20:38.287083Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7593706785334253297:2531], compileActor: [1:7593706785334253305:2536] 2026-01-10T12:20:38.287139Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-10T12:20:38.287210Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7593706785334253305:2536], cluster: db, database: /Root, text: "\n SELECT * FROM 
[/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-10T12:20:38.287109Z 2026-01-10T12:20:38.309994Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7593706785334253305:2536], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n " 2026-01-10T12:20:38.526360Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7593706785334253305:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1768047638","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"ef06eb12-927ab45b-968408d0-1b1aa7c0","version":"1.0"} 2026-01-10T12:20:38.527148Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7593706785334253305:2536], duration: 0.240010s 2026-01-10T12:20:38.527197Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7593706785334253305:2536], owner: [1:7593706776744317422:2384], status: SUCCESS, issues: , uid: ef06eb12-927ab45b-968408d0-1b1aa7c0 2026-01-10T12:20:38.527346Z node 1 :KQP_COMPILE_SERVICE DEBUG: 
kqp_compile_service.cpp:628: Received response, sender: [1:7593706785334253297:2531], status: SUCCESS, compileActor: [1:7593706785334253305:2536] 2026-01-10T12:20:38.527433Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7593706785334253297:2531], queryUid: ef06eb12-927ab45b-968408d0-1b1aa7c0, status:SUCCESS >> TestProgram::JsonValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 3122, MsgBus: 6083 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:22.698103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:22.803806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:22.813464Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:22.813929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:22.814030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:23.130038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:23.130238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:23.136762Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047619718172 != 1768047619718175 2026-01-10T12:20:23.148758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:23.194372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:23.320933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:23.673317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:23.673402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:23.673451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:23.674156Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:23.688295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:24.001538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:24.063062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.370052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.750085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.016570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.875812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1905:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.876420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.877509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1979:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.877612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:25.913471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.097209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.343452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.628995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.870868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.136431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.439652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.729716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:28.094621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2786:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.094745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.095180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.095263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.095356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:28.101354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:28.277677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2795:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:28.334756Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2855:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:30.184592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cp ... nt type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285415Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285481Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285539Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285609Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285684Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285742Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285799Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285842Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285897Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.285937Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286006Z node 1 
:TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286073Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286127Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286183Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286240Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286298Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286352Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286439Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286526Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286580Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286636Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286693Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286749Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286802Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult 
TxId: 281474976715680 2026-01-10T12:20:38.286887Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.286942Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287023Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037962 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287091Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287136Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287190Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287250Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037958 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287305Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037957 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287382Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037955 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287440Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287531Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037953 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.287581Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289514Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037947 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289596Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289659Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037945 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289720Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037943 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289765Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037997 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289860Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289938Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-10T12:20:38.289995Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037999 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 query_phases { duration_us: 1768047626867787 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_f8164771-4843-8939-7b9e-2e84f29974be" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 15850 } compilation { duration_us: 12444 cpu_time_us: 6883 } process_cpu_time_us: 1263 total_duration_us: 6644254 total_cpu_time_us: 23996 |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 6127, MsgBus: 5110 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:29.958933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:30.087370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:30.096818Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:30.097235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:30.097336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:30.486567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:30.486720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:30.493054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047627029326 != 1768047627029329 2026-01-10T12:20:30.511041Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:30.560739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:30.698102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:31.058923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:31.058988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:31.059035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:31.059547Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:31.072038Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:31.411505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:31.500466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.775575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.151239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:32.431270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.291459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.292121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.293188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.293305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:33.333414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.566708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:33.814305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.099333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.337065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.614352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.872296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.180049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.525846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.525958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.526358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.526426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.526490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:35.531931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:35.697525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:35.746390Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:37.370439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:38.207447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 1768047628127216 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_2861ff7c-459e-032f-24e6-a28dfd6c4e70" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 1485 affected_shards: 1 } compilation { duration_us: 11416 cpu_time_us: 6409 } process_cpu_time_us: 1513 total_duration_us: 1040485 total_cpu_time_us: 9407 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT >> TestProgram::SimpleFunction >> TestProgram::YqlKernelEndsWith |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TestProgram::Like [GOOD] >> TestProgram::CountUIDByVAT [GOOD] >> TestProgram::SimpleFunction [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2026-01-10T12:19:57.445493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706610264609097:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:57.445690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:57.685211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:57.685314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-10T12:19:57.687153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.689946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:57.799879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:57.840776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:57.995101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.059868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.192556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.250835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.883105Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706622418796215:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:00.883189Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.902424Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.971854Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706622418796193:2081] 1768047600880565 != 1768047600880568 2026-01-10T12:20:00.978425Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:01.016418Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:01.016535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:01.022547Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:01.172112Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.219655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:01.234811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.286734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.330874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.375227Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706639993759491:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:04.375308Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:04.386158Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.446947Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:04.480858Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:04.480948Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:04.485849Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:04.661296Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:04.673246Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:04.694982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.747475Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:04.792917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.004354Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706657829875227:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:08.004636Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:08.050844Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.119652Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:08.119736Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:08.132501Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:08.147206Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:08.267481Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.365171Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:08.383092Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.444292Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.499304Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp ... 1-10T12:20:20.760194Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:20.876360Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:20.883164Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:20.902470Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.000767Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.067544Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:24.770996Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706723183126516:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:24.771312Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:24.797102Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:24.901807Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:24.904724Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706723183126396:2081] 1768047624762270 
!= 1768047624762273 2026-01-10T12:20:24.921198Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:24.921323Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:24.953189Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:24.960180Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:25.228581Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:25.233365Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:25.249500Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.327386Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.398850Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.522262Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706744691109701:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:29.522327Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:29.541808Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:29.648769Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:29.648873Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:29.656581Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:29.660019Z node 9 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706744691109673:2081] 1768047629520987 != 1768047629520990 2026-01-10T12:20:29.699569Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:29.792749Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:30.017464Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:30.025014Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:30.037567Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:30.044576Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.130310Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.193203Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.604135Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706766816720859:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:34.604210Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:34.718966Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:34.729925Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:34.759935Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:34.760037Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2026-01-10T12:20:34.774629Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:34.966791Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:35.103020Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:35.124711Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.197218Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.263034Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, 
label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 
Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"2,4\",\"p\":{\"options\":[\"{10001(Count):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[4]},\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"a":true,"i":"2,4","p":{"options":["{10001(Count):[2]}"],"type":"AGGREGATION","keys":[4]},"o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> 
TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TA] $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2026-01-10T12:19:58.025602Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706611541931406:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:58.025973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:58.310047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.310479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:58.310585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:58.312667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:58.420143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706607246963997:2081] 1768047597990834 != 1768047597990837 2026-01-10T12:19:58.426817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:58.497275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:58.610042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:58.675548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.827956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:58.872623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:59.024220Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-10T12:20:01.548549Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706626655922613:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:01.548603Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:01.567335Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.639587Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:01.640855Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706626655922586:2081] 1768047601547225 != 1768047601547228 2026-01-10T12:20:01.677509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:01.677586Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:01.682412Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:01.727082Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:01.831687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:01.856697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.919759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:01.968750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.136199Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706642750497078:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:05.136280Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:05.147769Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:05.203133Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706642750497051:2081] 1768047605135393 != 1768047605135396 2026-01-10T12:20:05.203298Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:05.210227Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:05.210288Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:05.223589Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:05.369612Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:05.408221Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:05.428610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.474119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:05.517094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:08.455214Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706654307288516:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:08.455293Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:08.474057Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.559246Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706654307288488:2081] 1768047608454652 != 1768047608454655 2026-01-10T12:20:08.561468Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:08.570047Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:08.570166Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:08.597681Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:08.764322Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:08.797982Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:08.817011Z node 4 :FLAT_TX_SCHEMESHARD WARN: ... nected -> Connecting 2026-01-10T12:20:20.978601Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:20.999713Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:21.277007Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:21.283377Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:21.298852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.373540Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.424469Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.149999Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706727630055381:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:25.150104Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:25.167579Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:25.257501Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:25.268184Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7593706727630055353:2081] 1768047625148666 != 1768047625148669 2026-01-10T12:20:25.292955Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:25.293102Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:25.318299Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:25.410969Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:25.617960Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:25.642353Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.768035Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:25.823728Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:29.781120Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706744926814966:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:29.781195Z 
node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:29.804755Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:29.923739Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:29.923850Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:29.929052Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706744926814922:2081] 1768047629779902 != 1768047629779905 2026-01-10T12:20:29.929601Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:29.968190Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:30.060193Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:30.285529Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:30.291916Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:30.309241Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.387119Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:30.461900Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:34.934815Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706769095285277:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:34.934882Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:34.955181Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:35.029843Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:35.035816Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706769095285248:2081] 1768047634933607 != 1768047634933610 2026-01-10T12:20:35.067267Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:35.067351Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:35.074217Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:35.142668Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:35.413325Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:35.433640Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.505645Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:35.563184Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000 ... 
04\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> KqpTx::LocksAbortOnCommit >> KqpSinkTx::ExplicitTcl >> KqpSinkMvcc::WriteSkewInsert-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\" ... ?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 
7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2026-01-10T12:19:59.752982Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706619674235093:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:59.753074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:00.077452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:00.077572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:00.085221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.085486Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706619674235070:2081] 1768047599752044 != 1768047599752047 2026-01-10T12:20:00.099927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:00.101342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:00.267698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:00.328626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:00.420762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.538848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:00.589923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.114911Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706633609161662:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:03.117721Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:03.130840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:03.224177Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706633609161637:2081] 1768047603113640 != 1768047603113643 2026-01-10T12:20:03.227406Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:03.236448Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.267889Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:03.268003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:03.271262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:03.467786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:03.482643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.536063Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:03.543994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:03.595399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.370826Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706648780936974:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:06.370910Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:06.383904Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.450075Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:06.451530Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706648780936947:2081] 1768047606370058 != 1768047606370061 2026-01-10T12:20:06.463060Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:06.463140Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:06.487554Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:06.643464Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:06.678785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:06.684873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:06.698600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.754602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:06.804617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:09.869665Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706661867939080:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:09.869734Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:09.888979Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:09.942386Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:09.987269Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:09.987331Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:09.990115Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:10.070713Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:10.187133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10 ... 
iles were not loaded 2026-01-10T12:20:22.164187Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7593706709991917501:2081] 1768047621994962 != 1768047621994965 2026-01-10T12:20:22.167899Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:22.314204Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:22.424437Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:22.432152Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:22.448014Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.522362Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:22.575470Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:26.642757Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7593706732635414405:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:26.643066Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:26.664193Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:26.742649Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:26.770838Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:26.770944Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:26.783752Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-10T12:20:26.857743Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:27.060335Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:27.072346Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:27.089910Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.172617Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:27.277378Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.415391Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7593706755760480611:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:31.415461Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:31.442983Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:31.547316Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:31.564448Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7593706755760480586:2081] 1768047631414246 != 1768047631414249 2026-01-10T12:20:31.568035Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:31.568236Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:31.607292Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:31.736180Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:31.842317Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:31.861272Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.928766Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:31.986553Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.177017Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593706774857474989:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:36.177091Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:36.202228Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:20:36.303819Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:36.305536Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593706774857474961:2081] 1768047636175481 != 1768047636175484 2026-01-10T12:20:36.317168Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:36.317272Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:36.345840Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:36.467569Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:20:36.568405Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:36.588505Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.665824Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:36.727592Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSnapshotIsolation::TSimpleOltp >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> KqpLocksTricky::TestNoWrite >> KqpSinkMvcc::DirtyReads-IsOlap |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpSnapshotIsolation::TReadOnlyOlap >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpSinkTx::OlapSnapshotRO >> KqpLocks::InvalidateOnCommit >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite >> KqpSinkLocks::VisibleUncommittedRowsUpdate >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpSinkLocks::OlapVisibleUncommittedRows >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpSinkMvcc::TxReadsCommitted-IsOlap >> KqpSinkMvcc::TxReadsCommitted+IsOlap >> KqpSnapshotIsolation::TReadOnlyOltp >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> KqpTx::TooManyTx >> KqpSinkMvcc::WriteSkewUpsert+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: 2026-01-10T12:19:29.059778Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:19:29.152341Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:29.152422Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:29.152507Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.152584Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: 
[1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:19:29.176451Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.197435Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:19:29.198601Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:19:29.201237Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:19:29.203630Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:19:29.205621Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:19:29.215883Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.216462Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4d92792e-cde80e05-21539a1e-d5e72d88_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.222805Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|44aa0eab-fd64177a-c3eb6222-4add6084_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.252918Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.253409Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bea8f482-d31158fe-4c78d0c1-f48fd25_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.265062Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.265478Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|577fbd8-388ed532-64e88301-b0108768_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.274076Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 
2026-01-10T12:19:29.274459Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f2395be-edb1feb9-ebbd6e43-9e941ff7_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.281961Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:19:29.282253Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dde94619-31f3e8bf-e153e35e-96af294_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:19:29.710832Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-10T12:19:29.761639Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:29.761750Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:29.761808Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.761878Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:185:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:188:2057] recipient: [2:187:2197] Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:190:2057] recipient: [2:187:2197] 2026-01-10T12:19:29.811318Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:19:29.811411Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:19:29.811473Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:29.811535Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! 
new actor is[2:189:2198] 2026-01-10T12:19:29.832911Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:29.895242Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:29.926550Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:29.937336Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:29.968436Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.020391Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.064391Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.178355Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.209894Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.490419Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.532142Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:30.854547Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:31.135489Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:19:31.208382Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:270:2057] recipient: [2:14:2061] 2026-01-10T12:19:31.403306Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:19:31.404326Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions 
{ PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-10T12:19:31.405195Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:276:2198] 2026-01-10T12:19:31.407385Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition ... VER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:20:40.710560Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:20:40.817759Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:20:40.949010Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:20:41.183491Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:20:41.293478Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [47:292:2279] sender: [47:394:2057] recipient: [47:14:2061] 2026-01-10T12:20:41.930455Z node 48 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 48 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:112:2057] recipient: [48:105:2138] 2026-01-10T12:20:42.002196Z node 48 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:42.002272Z node 48 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:42.002330Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:42.002395Z node 48 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927938 is [48:157:2176] sender: [48:158:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:183:2057] recipient: [48:14:2061] 2026-01-10T12:20:42.028554Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:42.029572Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 48 actor [48:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 
SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 } 2026-01-10T12:20:42.030771Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [48:189:2142] 2026-01-10T12:20:42.035238Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:189:2142] 2026-01-10T12:20:42.037662Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [48:190:2142] 2026-01-10T12:20:42.039537Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:190:2142] 2026-01-10T12:20:42.048666Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.049079Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|17655a58-a5434541-e62f9929-e99ac179_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.056706Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b08444b5-54230a1c-b82a0f0d-6e76c71e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.094669Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.095343Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ec801385-939775be-8f46bf1b-c8ad98e0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.106250Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.106826Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|97e251de-976d816a-8d0686b-78baaabd_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.123108Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.123707Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a8c85552-d8ed19b0-266c815a-9fca997d_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.134832Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.135396Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6296fd51-76495c07-d4fb69e6-8b43c57f_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.602790Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 
72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:112:2057] recipient: [49:105:2138] 2026-01-10T12:20:42.654322Z node 49 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:42.654383Z node 49 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:42.654423Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:42.654468Z node 49 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927938 is [49:157:2176] sender: [49:158:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:181:2057] recipient: [49:14:2061] 2026-01-10T12:20:42.676630Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:42.677623Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 49 actor [49:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 } 2026-01-10T12:20:42.678723Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [49:187:2142] 2026-01-10T12:20:42.682081Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:187:2142] 2026-01-10T12:20:42.684350Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [49:188:2142] 2026-01-10T12:20:42.686981Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:188:2142] 2026-01-10T12:20:42.699327Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.699929Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ca07dce-9b43a391-d228f55c-f64f040c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.707105Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e304e08-f6d49df4-34e09fe8-a7f8f9f5_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.741293Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: 
Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.742075Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c9781f47-db520655-7a4f5c53-ddcfa18b_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.754760Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.755392Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|357409cb-64dbb96b-dbb4504c-7c7388cf_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.769337Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.769958Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|997947aa-381d9b34-7afd250d-f1d286bd_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:42.780976Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:42.781582Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|eb6ef904-3a8c1050-670aef8e-3d13563_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |92.6%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkLocks::TInvalidate |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkLocks::InvalidateOnCommit >> KqpSnapshotIsolation::TConflictReadWriteOltp >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpSinkMvcc::SnapshotExpiration >> KqpSinkTx::OlapDeferredEffects >> KqpSinkLocks::VisibleUncommittedRows >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpCost::VectorIndexLookup+useSink [GOOD] |92.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 32643, MsgBus: 4115 2026-01-10T12:20:13.661558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706676277876677:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:13.662103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:13.885118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:13.885226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:13.951692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:13.952056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:13.955274Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706676277876647:2081] 1768047613660014 != 1768047613660017 2026-01-10T12:20:13.957805Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:14.075735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:14.075753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:14.075758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:14.075836Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:14.206639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:14.490788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:14.551415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.675308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:14.691276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.853292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:14.931912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.026993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706693457747699:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.027115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.027489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706693457747709:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.027545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.454332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.489633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.534756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.567244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.606396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.661495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.712709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.761643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:17.850173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706693457748582:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.850252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.850581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706693457748587:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.850640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706693457748588:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.850687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:17.854920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:17.870217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706693457748591:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:17.955145Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706693457748642:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:18.668194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706676277876677:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:18.708959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:19.556091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... 54775816u;3u];["lS\2";9223372036854775817u;4u];["kO\2";9223372036854775818u;4u];["nI\2";9223372036854775819u;5u];["nN\2";9223372036854775820u;5u];["vB\2";9223372036854775821u;6u];["sF\2";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL
\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] /Root/Vectors: 
[[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: 
[["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> TxUsage::WriteToTopic_Demo_41_Table >> TxUsage::WriteToTopic_Demo_11_Table >> BasicUsage::CreateTopicWithStreamingConsumer >> BasicUsage::CreateTopicWithCustomName >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] Test 
command err: Trying to start YDB, gRPC: 8028, MsgBus: 4375 2026-01-10T12:20:17.169667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706693110847537:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:17.169815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:17.210093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:17.441504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:17.539606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:17.539699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:17.541632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:17.554538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:17.575790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706693110847496:2081] 1768047617167226 != 1768047617167229 2026-01-10T12:20:17.643623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:17.687525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:17.687559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:17.687640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:17.687763Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:18.187550Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:18.216391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:18.234670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:18.296122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.456623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.606531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:18.678594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.598345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705995751268:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.598478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.598888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706705995751278:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.598929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:20.935693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:20.976734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.013726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.050143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.086655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.143498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.197979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.256912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:21.328278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706710290719447:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.328373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.328396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706710290719452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.328575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706710290719454:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.328631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:21.332034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:21.344024Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706710290719456:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:21.416658Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706710290719507:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:22.169912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706693110847537:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:22.171405Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence; ... 54775816u;3u];["lS\2";9223372036854775817u;4u];["kO\2";9223372036854775818u;4u];["nI\2";9223372036854775819u;5u];["nN\2";9223372036854775820u;5u];["vB\2";9223372036854775821u;6u];["sF\2";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775
819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] /Root/Vectors: 
[[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: 
[["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::InvalidateOnError >> KqpSnapshotIsolation::TSimpleOltp [GOOD] >> KqpSnapshotIsolation::TSimpleOlap >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::RollbackByIdle |92.6%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkMvcc::TxReadsCommitted-IsOlap [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap >> KqpSinkMvcc::WriteSkewInsert-IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap >> KqpSinkMvcc::DirtyReads-IsOlap [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::TwoPhaseTx >> KqpSinkLocks::VisibleUncommittedRowsUpdate [GOOD] >> KqpSinkMvcc::DirtyReads+IsOlap >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> KqpSnapshotIsolation::TReadOnlyOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp >> KqpSnapshotIsolation::TConflictReadWriteOltp [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::TInvalidateOlap >> KqpSinkLocks::VisibleUncommittedRows [GOOD] >> KqpSinkLocks::UpdateLocksTwoShards >> TxUsage::WriteToTopic_Demo_12_Table >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp >> KqpLocksTricky::TestNoWrite [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::MixEnginesOldNew ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:19:07.524506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:19:07.524593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:07.524622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:19:07.524656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:19:07.524694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-10T12:19:07.524718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:19:07.524757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:07.524831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:19:07.525453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:19:07.525653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:19:07.584800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:19:07.584863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:07.595602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:19:07.595700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:19:07.595853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:19:07.601807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:19:07.602017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:19:07.602506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:07.602661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:19:07.604379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:07.604541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:19:07.605579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:19:07.605623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:07.605668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:19:07.605699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:19:07.605726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:19:07.605900Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:19:07.721983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.722988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.723944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.724042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.724166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:07.724254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... cipient [3:712:2698]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:54.636541Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:20:54.636639Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000038 last cleanup 0 2026-01-10T12:20:54.636718Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:20:54.636762Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2026-01-10T12:20:54.636798Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2026-01-10T12:20:54.636833Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-10T12:20:54.636982Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:712:2698]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:54.637261Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-10T12:20:54.638880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:712:2698], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 433 Memory: 124368 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 129 TableOwnerId: 72057594046678944 FollowerId 2026-01-10T12:20:54.638937Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:20:54.639005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0433 2026-01-10T12:20:54.639142Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:20:54.639191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:20:54.639944Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435080, Sender [3:1465:3407], Recipient [3:712:2698]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2026-01-10T12:20:54.688487Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:54.688577Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:54.688618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-10T12:20:54.688747Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-10T12:20:54.688787Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-10T12:20:54.688927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-10T12:20:54.689001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-10T12:20:54.689041Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-10T12:20:54.689132Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2026-01-10T12:20:54.689211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-10T12:20:54.689320Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:20:54.703439Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:54.703529Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:20:54.703567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:20:54.996853Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, 
Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:54.996941Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:54.997049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:54.997087Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:55.394430Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:55.394525Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:55.394649Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:55.394690Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:55.774771Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:55.774851Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:55.774937Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:55.774980Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.186026Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.186104Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.186210Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.186245Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.583066Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.583155Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.583282Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.583326Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.654825Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:712:2698]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:20:56.964944Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.965023Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:20:56.965110Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:20:56.965164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkTx::Interactive >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> KqpSinkLocks::OlapVisibleUncommittedRows [GOOD] >> KqpSinkLocks::OlapUpdateLocksTwoShards >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail+useSink >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> KqpSnapshotIsolation::TReadOwnChangesOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpSinkMvcc::TxReadsCommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: 2026-01-10T12:18:26.591416Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:26.664856Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:26.664935Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 
2026-01-10T12:18:26.665001Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.665069Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:26.683897Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-10T12:18:26.684017Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:26.713349Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2026-01-10T12:18:26.713558Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.716132Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2026-01-10T12:18:26.716328Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.716434Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.716499Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.716553Z node 1 :PERSQUEUE DEBUG: 
partition_init.cpp:86: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:26.717357Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:26.717813Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:26.720700Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:26.720776Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:26.720837Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:26.720895Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:26.720967Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:26.723789Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:18:26.723852Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:26.723900Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.723969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.724004Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:26.724048Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.724119Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.724221Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:26.724285Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:26.724336Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.724371Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2026-01-10T12:18:26.724414Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2026-01-10T12:18:26.724447Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:18:26.724484Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:18:26.724545Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:26.724825Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.724871Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:26.724961Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:26.725173Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:26.725414Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:26.727516Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:26.727575Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-10T12:18:26.727625Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:26.727685Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:26.727740Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:26.729843Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:26.729896Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:18:26.729934Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.729985Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.730019Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:26.730050Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:26.730091Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.730147Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-10T12:18:26.730183Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-10T12:18:26.730266Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:26.730304Z node 1 :PERSQUEUE DEBUG: p ... ip request needed for owner 2026-01-10T12:20:54.345788Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|156b7414-554ba895-3ed1ebcd-10ac7c30_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:54.622463Z node 74 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:54.622911Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|328f418a-d8ae2651-e83b15c9-99034346_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:55.406649Z node 75 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 75 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:108:2057] recipient: [75:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:108:2057] recipient: [75:106:2138] Leader for TabletID 72057594037927937 is [75:112:2142] sender: [75:113:2057] recipient: [75:106:2138] 2026-01-10T12:20:55.474770Z node 75 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:55.474844Z node 75 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:55.474899Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:55.474964Z node 75 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:154:2057] recipient: [75:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:154:2057] recipient: [75:152:2172] 
Leader for TabletID 72057594037927938 is [75:158:2176] sender: [75:159:2057] recipient: [75:152:2172] Leader for TabletID 72057594037927937 is [75:112:2142] sender: [75:184:2057] recipient: [75:14:2061] 2026-01-10T12:20:55.502134Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:55.503558Z node 75 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 73 actor [75:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 73 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 73 } Consumers { Name: "user" Generation: 73 Important: true } 2026-01-10T12:20:55.505397Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [75:190:2142] 2026-01-10T12:20:55.508744Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [75:190:2142] 2026-01-10T12:20:55.510989Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [75:191:2142] 2026-01-10T12:20:55.513562Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [75:191:2142] 2026-01-10T12:20:55.515406Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [75:192:2142] 2026-01-10T12:20:55.518034Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [75:192:2142] 2026-01-10T12:20:55.675239Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:55.675842Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8a34d897-c5c97753-49337593-42ebc6f1_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:56.179351Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:56.179934Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f481efb-127e518c-88226e05-f4394a9c_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:56.559294Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:56.559849Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6cda606f-296c5cec-820a0916-8529ccde_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start 
offset = 80000 2026-01-10T12:20:56.975549Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:56.976144Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e41a83e1-187ffd41-30ca426a-c41bb92b_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:57.232711Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:57.233283Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|cd8df06-54c9984b-7bbf0249-db8ce040_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:57.938542Z node 76 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 76 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:108:2057] recipient: [76:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:108:2057] recipient: [76:106:2138] Leader for TabletID 72057594037927937 is [76:112:2142] sender: [76:113:2057] recipient: [76:106:2138] 2026-01-10T12:20:58.017932Z node 76 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:20:58.017998Z node 76 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:20:58.018043Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:58.018109Z node 76 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:154:2057] recipient: [76:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:154:2057] recipient: [76:152:2172] Leader for TabletID 72057594037927938 is [76:158:2176] sender: [76:159:2057] recipient: [76:152:2172] Leader for TabletID 72057594037927937 is [76:112:2142] sender: [76:184:2057] recipient: [76:14:2061] 2026-01-10T12:20:58.040367Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:20:58.041657Z node 76 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 74 actor [76:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 74 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 74 } Consumers { Name: "user" Generation: 74 Important: true } 2026-01-10T12:20:58.043161Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [76:190:2142] 2026-01-10T12:20:58.046192Z node 76 :PERSQUEUE INFO: partition.cpp:712: 
[72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [76:190:2142] 2026-01-10T12:20:58.048435Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [76:191:2142] 2026-01-10T12:20:58.050787Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [76:191:2142] 2026-01-10T12:20:58.052745Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [76:192:2142] 2026-01-10T12:20:58.055022Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [76:192:2142] 2026-01-10T12:20:58.261781Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:58.262370Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bf578b85-4eea60d4-be4e8f60-ea8b71a3_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:58.748325Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:58.749031Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a1b725a5-1cd4af73-94ec1cf2-d540ca73_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:59.176844Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:59.177393Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|758fb1b8-775ea58a-531f7c48-a1596e8b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2026-01-10T12:20:59.560036Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:59.560690Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e35e1a55-575a9766-694e4f9d-60063c7_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:20:59.786189Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:20:59.786657Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4a1ea362-7de9a47-1faf648d-e891d1a_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpSinkLocks::UpdateLocksTwoShards [GOOD] >> KqpSinkLocks::UpdateLocksOneShard >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table >> KqpSinkMvcc::WriteSkewUpsert+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap >> BasicUsage::CreateTopicWithStreamingConsumer [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::ReadWithRestarts >> BasicUsage::CreateTopicWithCustomName [GOOD] >> 
BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_12_Table [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query >> TxUsage::WriteToTopic_Demo_41_Table [GOOD] >> TxUsage::WriteToTopic_Demo_12_Query >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD] >> TxUsage::WriteToTopic_Demo_42_Table >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap >> KqpTx::MixEnginesOldNew [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap >> KqpSinkTx::Interactive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 1056, MsgBus: 62072 2026-01-10T12:20:43.069627Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807252201647:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.069707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.324255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.353236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.353364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.358974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.465062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.466440Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706807252201626:2081] 1768047643068842 != 1768047643068845 2026-01-10T12:20:43.563089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.563118Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.563126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.563240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:43.578169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.093275Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.154454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.160797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:44.215041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:44.386504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:44.618398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:44.694759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.723168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820137105387:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.723281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.723746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820137105397:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.723802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.009028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.036023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.065563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.096065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.129160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.168537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.201412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.248883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.327274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824432073562:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.327318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.327515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824432073567:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.327553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824432073568:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.327620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.331262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.341119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706824432073571:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:47.423844Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706824432073622:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.069890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706807252201647:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.069953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:50.477689Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTQxMzllMjktNWQ4OTc4MmYtNGJkM2EwMDItMTdj ... BhYTAtNzBiNmM0MmI=, ActorId: [2:7593706867926531491:2531], ActorState: ReadyState, LegacyTraceId: 01kekxmghg2fqqgz3qsy9n8360, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekxmgfr0g8nmtehn2xgyxsr" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 19057, MsgBus: 17595 2026-01-10T12:20:58.658306Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706872998436247:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:58.658907Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:58.744331Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:58.924293Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:58.925948Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:58.926027Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:58.936696Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:58.969425Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:59.150946Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:59.150964Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:59.150969Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:59.151029Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:59.341377Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:59.679366Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:59.699895Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.730470Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:59.754556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.924764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:00.001519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.849710Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890178307259:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.849830Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.850359Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890178307269:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.850406Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.913985Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.956229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.994605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.032968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.071569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.109782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.144838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.196304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.306264Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706894473275431:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.306402Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.306861Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706894473275436:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.306924Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706894473275437:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.306966Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.311271Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:03.324059Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706894473275440:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:21:03.420810Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706894473275491:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:03.658634Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706872998436247:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:03.658719Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 15246, MsgBus: 23205 2026-01-10T12:20:43.898853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804747405373:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.899087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.932379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:44.341420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.341522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.402644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.432443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.432494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.616580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.616602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.616609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.616701Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.636749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:20:44.904596Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.302767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.310986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:45.375156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.535777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.696016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.763164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.310860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821927276269:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.310999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.311337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821927276279:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.311427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.653360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.688197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.724405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.761255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.798004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.849929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.913642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.962304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.071864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826222244449:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.071966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.072098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826222244454:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.072152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826222244456:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.072204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.077056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.089780Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706826222244458:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:48.199394Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706826222244509:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.903350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706804747405373:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.903418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.937926Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODQzZTZjY ... node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706858234530437:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25147, MsgBus: 11978 2026-01-10T12:20:58.780832Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706868881477996:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:58.781028Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:58.803045Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:58.899193Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:58.903866Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706868881477768:2081] 1768047658751733 != 1768047658751736 2026-01-10T12:20:58.931470Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:58.931534Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:58.940867Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:59.090486Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:59.120626Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:59.120646Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:59.120653Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-10T12:20:59.120732Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:59.597867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:59.610620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.677958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.781585Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:59.824627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.904887Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.604891Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706886061348829:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.604992Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.605517Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706886061348838:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.605562Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:02.674938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.709588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.751527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.799034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.839145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.882491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.922721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:02.974922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.056206Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890356317008:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.056305Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.056751Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890356317014:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.056829Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.056888Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890356317013:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.061615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:03.080787Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706890356317017:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:21:03.179410Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706890356317068:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:03.780567Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706868881477996:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:03.780648Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSnapshotIfInsertRead [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpLocks::MixedTxFail-useSink >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 11208, MsgBus: 19390 2026-01-10T12:20:43.168444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807311410193:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.169951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.235122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:43.544202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.544321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.577275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.621875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.635539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.775129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.804794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.804817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.804824Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.804897Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.191347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.368011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.380860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.453426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820196312940:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.453447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820196312929:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.453530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.453824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820196312944:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.453875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.457101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.468244Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706820196312943:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:46.608819Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706820196312996:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:46.886940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.990317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.902187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.597309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706807311410193:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.706047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.771236Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGUwN2FmYjEtMjVkMDlkZjAtNWI3MDJjNjktMjJiYTc0NQ==, ActorId: [1:7593706833081222754:2962], ActorState: ReadyState, LegacyTraceId: 01kekxm8yz0ds60jzf7xp10y0r, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekxm8kf20hrxd5y2z7bx7g7" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 17191, MsgBus: 65409 2026-01-10T12:20:51.063301Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706842533824381:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:51.065348Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:51.071812Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:51.208175Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:51.209999Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:20:51.211344Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706842533824169:2081] 1768047651037531 != 1768047651037534 2026-01-10T12:20:51.245858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:51.245942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:51.249594Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:51.462268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.476682Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:51.476703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:51.476711Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:51.476788Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:52.037772Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:52.038516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:52.053171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:54.796903Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706855418726921:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.797016Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.797379Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706855418726956:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.801644Z node 2 :FLAT_TX_SCHEMESHARD WARN: ... 0, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.987813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:56.014564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:56.043632Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593706842533824381:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:56.043703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:57.468696Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-10T12:20:57.468919Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:20:57.469070Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:20:57.469278Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7593706868303636912:2964], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [2:7593706868303636888:2964]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7593706868303636912:2964].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:20:57.469891Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7593706868303636905:2964], SessionActorId: [2:7593706868303636888:2964], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7593706868303636888:2964]. 2026-01-10T12:20:57.470153Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YzRlZWM2NGUtNmMzODU3NmQtNWFmNmI0MjQtNTE4NDdjMjc=, ActorId: [2:7593706868303636888:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmgd9c1d0nc8j6f76d5v5, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7593706868303636907:2964] from: [2:7593706868303636905:2964] trace_id# 2026-01-10T12:20:57.470274Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:7593706868303636907:2964] TxId: 281474976710664. Ctx: { TraceId: 01kekxmgd9c1d0nc8j6f76d5v5, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzRlZWM2NGUtNmMzODU3NmQtNWFmNmI0MjQtNTE4NDdjMjc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:20:57.470598Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YzRlZWM2NGUtNmMzODU3NmQtNWFmNmI0MjQtNTE4NDdjMjc=, ActorId: [2:7593706868303636888:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmgd9c1d0nc8j6f76d5v5, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KV`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 2026-01-10T12:20:57.565652Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YzRlZWM2NGUtNmMzODU3NmQtNWFmNmI0MjQtNTE4NDdjMjc=, ActorId: [2:7593706868303636888:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmggm7y8a1f0m8zbke40p, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekxmgcyfyyavfexek871f1a" issue_code: 2015 severity: 1 } trace_id#
: Error: Transaction not found: 01kekxmgcyfyyavfexek871f1a, code: 2015 Trying to start YDB, gRPC: 11302, MsgBus: 15675 2026-01-10T12:20:59.085861Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706873255668471:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:59.096119Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:59.148035Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:59.217983Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:59.245724Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:59.245819Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:59.262647Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:59.350158Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:59.350181Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:59.350192Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:59.350309Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:59.429716Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:59.849916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:59.857028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:21:00.096214Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:03.030159Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890435538443:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.030253Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.030644Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890435538479:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.030693Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706890435538480:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.030778Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:03.034370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:03.045602Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706890435538483:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:03.122510Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706890435538536:2532] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:03.183221Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.231439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.092755Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706873255668471:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:04.095015Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:04.290910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert >> KqpSinkLocks::UpdateLocksOneShard [GOOD] >> KqpSinkLocks::UpdateLocksOneShardRowExists >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> TFlatTest::AutoSplitMergeQueue [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace-IsOlap >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> KqpSinkMvcc::WriteSkewUpsert-IsOlap [GOOD] >> KqpSinkTx::DeferredEffects >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 64269, MsgBus: 20836 2026-01-10T12:20:43.760853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807122561552:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.760892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.196256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.205003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.205099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.270280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.363807Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.378987Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706807122561528:2081] 1768047643743178 != 1768047643743181 2026-01-10T12:20:44.443391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.496815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.496840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.496854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.496958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.808490Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.025105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.240921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824302431602:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.240927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824302431610:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.241050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.242141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824302431617:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.242224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.244806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.256343Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706824302431616:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.384520Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706824302431669:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.706257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:47.880005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.881401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.881647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.881746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.881876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.882008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.882127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.882217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.882315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.882410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.882533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:47.882667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:47.882764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706824302431839:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:47.884811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.884848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.885027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.885111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.885195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.885281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.885369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.885496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.885595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7593706824302431838:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... d;ss_local=44;result=not_found; 2026-01-10T12:20:56.644693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.644745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.644766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.648336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.648451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.648471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.653375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.653432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.653446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.655910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.655951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.655967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.661112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-10T12:20:56.661165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.661182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.663414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.663463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.663475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.668838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.668887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:56.668916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:20:59.180173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:20:59.180203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 9984, MsgBus: 61047 2026-01-10T12:21:00.998163Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:00.998302Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:01.091630Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:01.096249Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706879841522037:2081] 1768047660976879 != 1768047660976882 2026-01-10T12:21:01.133242Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:01.133328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:01.139807Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:21:01.204485Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:01.204520Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:01.204529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:01.204609Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:01.239232Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:01.713682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:01.993518Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:04.710205Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706897021392088:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.710296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.710359Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706897021392116:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.711682Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706897021392118:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.711779Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.789560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:04.804299Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706897021392119:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:04.886717Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593706897021392171:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:04.947278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.021023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.998929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2026-01-10T12:19:41.677949Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706541406256161:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:41.678955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:41.918291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:41.923295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:41.923415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:41.953828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:42.041045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:42.045187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706541406256107:2081] 1768047581672494 != 1768047581672497 2026-01-10T12:19:42.214098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:42.249550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:42.255419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:19:42.339469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) A-0 2026-01-10T12:19:42.700774Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; B-0 2026-01-10T12:19:42.867477Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.019s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-10T12:19:42.873364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-10T12:19:42.893187Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.021s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-10T12:19:42.894402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-10T12:19:42.973162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:19:42.973296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 6291502 row count 1 2026-01-10T12:19:42.973350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2026-01-10T12:19:42.973491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-10T12:19:42.973806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:19:43.170275Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.017s,wait=0.000s,interrupts=1} 
Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 A-1 2026-01-10T12:19:43.176465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2026-01-10T12:19:43.203547Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.027s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2026-01-10T12:19:43.204896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2026-01-10T12:19:43.276763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:19:43.276840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583020 row count 2 2026-01-10T12:19:43.276867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583020 2026-01-10T12:19:43.276920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-10T12:19:43.277263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2026-01-10T12:19:43.427547Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.020s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-10T12:19:43.452835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2026-01-10T12:19:43.512284Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.062s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2026-01-10T12:19:43.553267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:19:43.553364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874522 row count 3 2026-01-10T12:19:43.553400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874522 2026-01-10T12:19:43.553499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting 
for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:19:43.553592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 599 seconds 2026-01-10T12:19:43.553646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-10T12:19:43.553752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:19:43.557866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 4 ms, with status# 1, next wakeup in# 599.995603s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:19:43.590869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874538 rowCount 3 cpuUsage 0 2026-01-10T12:19:43.690964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:19:43.691068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874538 row count 3 2026-01-10T12:19:43.691104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874538 2026-01-10T12:19:43.691168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-10T12:19:43.691761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2026-01-10T12:19:43.737099Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=Done, 2 blobs 1r (max 1), put Spent{time=0.034s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-10T12:19:43.756196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 25166040 rowCount 4 cpuUsage 0 2026-01-10T12:19:43.852597Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 4} end=Done, 5 blobs 4r (max 4), put Spent{time=0.102s,wait=0.0 ... 
:TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2026-01-10T12:20:46.656265Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037904 not found WriteKVRow: 221 EraseKVRow: 191 2026-01-10T12:20:46.718435Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037906 not found 2026-01-10T12:20:46.718520Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found WriteKVRow: 222 2026-01-10T12:20:46.763753Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found 2026-01-10T12:20:46.763795Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found EraseKVRow: 192 WriteKVRow: 223 EraseKVRow: 193 2026-01-10T12:20:46.795337Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037907 not found 2026-01-10T12:20:46.815998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037911 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715710:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037911 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\320\000\000\000" } SchemeshardId: 72057594046644480 WriteKVRow: 224 2026-01-10T12:20:46.909399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037910 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715711:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037910 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\265\000\000\000" } SchemeshardId: 72057594046644480 2026-01-10T12:20:46.910523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037911 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715712:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037911 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\320\000\000\000" } SchemeshardId: 72057594046644480 EraseKVRow: 194 WriteKVRow: 225 EraseKVRow: 195 WriteKVRow: 226 EraseKVRow: 196 WriteKVRow: 227 EraseKVRow: 197 2026-01-10T12:20:47.034458Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037910 not found 2026-01-10T12:20:47.034495Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found WriteKVRow: 228 EraseKVRow: 198 WriteKVRow: 229 2026-01-10T12:20:47.116936Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found EraseKVRow: 199 WriteKVRow: 230 EraseKVRow: 200 WriteKVRow: 231 EraseKVRow: 201 WriteKVRow: 232 EraseKVRow: 202 WriteKVRow: 233 EraseKVRow: 203 WriteKVRow: 234 
EraseKVRow: 204 WriteKVRow: 235 EraseKVRow: 205 WriteKVRow: 236 EraseKVRow: 206 WriteKVRow: 237 EraseKVRow: 207 WriteKVRow: 238 EraseKVRow: 208 WriteKVRow: 239 EraseKVRow: 209 WriteKVRow: 240 EraseKVRow: 210 WriteKVRow: 241 EraseKVRow: 211 WriteKVRow: 242 EraseKVRow: 212 WriteKVRow: 243 EraseKVRow: 213 WriteKVRow: 244 EraseKVRow: 214 WriteKVRow: 245 EraseKVRow: 215 WriteKVRow: 246 EraseKVRow: 216 WriteKVRow: 247 EraseKVRow: 217 WriteKVRow: 248 EraseKVRow: 218 WriteKVRow: 249 EraseKVRow: 219 WriteKVRow: 250 EraseKVRow: 220 WriteKVRow: 251 EraseKVRow: 221 2026-01-10T12:20:48.014873Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found 2026-01-10T12:20:48.014915Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found 2026-01-10T12:20:48.096404Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2026-01-10T12:20:48.096809Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found WriteKVRow: 252 EraseKVRow: 222 2026-01-10T12:20:48.165702Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037922.1.6, eph 467} end=Term, 0 blobs 0r (max 35), put Spent{time=0.016s,wait=0.000s,interrupts=0} 2026-01-10T12:20:48.165795Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037922:1:13} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 6, product {0 parts epoch 0} thrown WriteKVRow: 253 2026-01-10T12:20:48.201448Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037918 not found 2026-01-10T12:20:48.272369Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715718:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037922 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\363\000\000\000" } SchemeshardId: 72057594046644480 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 WriteKVRow: 255 EraseKVRow: 225 WriteKVRow: 256 EraseKVRow: 226 2026-01-10T12:20:48.411230Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 WriteKVRow: 259 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 
EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 2026-01-10T12:21:03.091042Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2026-01-10T12:21:03.094032Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found 2026-01-10T12:21:03.226444Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found 2026-01-10T12:21:03.230896Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found 2026-01-10T12:21:03.398629Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found 2026-01-10T12:21:03.422333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715725:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037927 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\367\000\000\000" } SchemeshardId: 72057594046644480 2026-01-10T12:21:03.467098Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037925 not found 2026-01-10T12:21:03.467145Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2026-01-10T12:21:03.474903Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2026-01-10T12:21:03.492778Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found 2026-01-10T12:21:04.590092Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037933 not found 2026-01-10T12:21:04.590156Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2026-01-10T12:21:04.590177Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found 2026-01-10T12:21:04.590198Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2026-01-10T12:21:04.721521Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found 2026-01-10T12:21:04.721603Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037934 not found 2026-01-10T12:21:04.897769Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037935 not found 2026-01-10T12:21:04.900925Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037936 not found 2026-01-10T12:21:08.522855Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2026-01-10T12:21:08.550801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6004: Got TEvSplitPartitioningChangedAck for unknown txId 281474976715730 datashard 72075186224037937 2026-01-10T12:21:08.556887Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037937 not found |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 9299, MsgBus: 21313 2026-01-10T12:20:44.864017Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706811052847861:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.864097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:45.180242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:45.187237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:45.187319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:45.253889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:45.345885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:45.348243Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706811052847830:2081] 1768047644846852 != 1768047644846855 2026-01-10T12:20:45.367302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:20:45.456945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.456973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.456988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.457413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.860250Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.882482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.937357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.075317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.212285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.265965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.397295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706828232718892:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.397399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.397739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706828232718902:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.397788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.850336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.913084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.997838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.043611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.088015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.174416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.234155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.308936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.423206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706832527687073:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.423351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.423964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706832527687078:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.424015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706832527687079:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.424201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.429055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:49.449648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706832527687082:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:49.538351Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706832527687133:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:49.852452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706811052847861:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.852499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:51.785113Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710675; 2026-01-10T12:20:51.798081Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7593706841117622110:2533], Table: `/Root/TwoShard` ([7205 ... node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:00.864653Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:00.864661Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:00.864758Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:00.897581Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:01.413398Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:01.423377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:21:01.442814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:01.540355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:01.676505Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:01.710623Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:01.788811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.452647Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706895817877998:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.452732Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.453727Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706895817878007:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.453794Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.537112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.592410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.637298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.673970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.712060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.790702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.843392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.931150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.045790Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706900112846183:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.045881Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.046169Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706900112846188:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.046206Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706900112846189:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.046239Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.050584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:05.061546Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706900112846192:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:21:05.152727Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706900112846243:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:05.662520Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706878638007005:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:05.662584Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:09.938107Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593706917292715839:2533], SessionActorId: [3:7593706908702781155:2533], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 . sessionActorId=[3:7593706908702781155:2533]. 2026-01-10T12:21:09.938361Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=OGFkZjNlNjktNzk5NWQ3MjEtMTdiMzI1My00Nzk4ZTgxYg==, ActorId: [3:7593706908702781155:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxmwbsey2nr1tagcmx5j2k, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593706917292715840:2533] from: [3:7593706917292715839:2533] trace_id# 2026-01-10T12:21:09.938442Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706917292715840:2533] TxId: 281474976710675. Ctx: { TraceId: 01kekxmwbsey2nr1tagcmx5j2k, Database: /Root, SessionId: ydb://session/3?node_id=3&id=OGFkZjNlNjktNzk5NWQ3MjEtMTdiMzI1My00Nzk4ZTgxYg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 } trace_id# 2026-01-10T12:21:09.938719Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=OGFkZjNlNjktNzk5NWQ3MjEtMTdiMzI1My00Nzk4ZTgxYg==, ActorId: [3:7593706908702781155:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxmwbsey2nr1tagcmx5j2k, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/EightShard`" issue_code: 2001 severity: 1 } trace_id# |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::DirtyReads+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 1055, MsgBus: 22400 2026-01-10T12:20:43.572781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706805121026894:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.592987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.654102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:43.908901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.928143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.928200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.978667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.208275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706805121026854:2081] 1768047643534566 != 1768047643534569 2026-01-10T12:20:44.228641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.228683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.228690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.228777Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.230699Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.256187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.600502Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.731212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.744998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.918816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818005929647:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.918826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818005929618:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.918907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.919226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818005929657:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.919281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.922264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.932957Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706818005929656:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.027500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706818005929709:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.294828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.438845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.445294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.713315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706805121026894:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.231608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.743317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.988388Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZjE3ZjJjNzEtOGIxYjIyNTItYTgxMTI2NS1lYWQ2NTY3ZA==, ActorId: [1:7593706830890839639:2961], ActorState: ExecuteState, LegacyTraceId: 01kekxma8x7g02kmbyg245h10s, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable/IndexUniq/indexImplTable`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 14952, MsgBus: 13170 2026-01-10T12:20:53.528188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.528283Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:53.677664Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.681230Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.681303Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.696333Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.739201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:53.928692Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.928718Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.928727Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.928804Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:54.480194Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.606007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.617348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:57.286012Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706867562867334:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.286085Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706867562867321:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.286231Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.288204Z node 2 :KQP_WORKLOAD_SERVICE WARN: ... 90 at tablet 72075186224038090, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 1954, MsgBus: 11582 2026-01-10T12:21:03.025968Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706892388099335:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:03.026150Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:03.058985Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:03.194959Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:03.200254Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706892388099293:2081] 1768047663024741 != 1768047663024744 2026-01-10T12:21:03.210370Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:03.210457Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:03.236393Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:03.352163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:03.388736Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:03.388762Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:03.388771Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:03.388849Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:04.059577Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:04.089648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:04.095373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:21:04.122254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.197164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.377938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.452048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:06.989703Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706905273003052:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:06.989805Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:06.990166Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706905273003062:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:06.990219Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.141208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.198567Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.246444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.298335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.339258Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.383548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.429053Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.492496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.635540Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706909567971227:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.635656Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.636055Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706909567971232:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.636125Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706909567971233:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.636170Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.640496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:07.651621Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706909567971236:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:21:07.752588Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706909567971287:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:08.026216Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706892388099335:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:08.026281Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> BasicUsage::ReadWithRestarts [GOOD] >> Describe::LocationWithKillTablets >> TxUsage::WriteToTopic_Demo_12_Query [GOOD] >> SlowTopicAutopartitioning::CDC_Write [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_13_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 2130, MsgBus: 1770 2026-01-10T12:20:43.824362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807533900298:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.824447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.188070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.193337Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.193447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.274989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.361799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.361975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706807533900273:2081] 1768047643820034 != 1768047643820037 2026-01-10T12:20:44.372912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.543639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.543667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.543673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.543744Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.854265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.059178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.079757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:45.131353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.270283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.428380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.503721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.382601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824713771338:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.382711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.383010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824713771348:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.383051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.686095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.723798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.757723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.802083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.874225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.945181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.024204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.082691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.173045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829008739524:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.173126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.173407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829008739529:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.173454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829008739530:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.173542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.176816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.190795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706829008739533:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:48.295944Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706829008739584:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.828171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706807533900298:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.829467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24694, MsgBus: 29170 2026-01-10T12:20:52.247090Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... 2026-01-10T12:21:03.602675Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:03.668347Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:03.668434Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:03.675091Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:03.748636Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:03.748656Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:03.748663Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:03.748731Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:03.756142Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:04.225520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:04.233075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:21:04.247760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.344910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.528997Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:04.584509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.689193Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.204805Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706911260726090:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.204887Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.205196Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706911260726100:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.205241Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.284012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.331331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.367626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.400260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.436577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.484001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.535739Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.594809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:07.684368Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706911260726972:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.684466Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.684776Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706911260726977:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.684841Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706911260726978:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.684917Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:07.689610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:07.700533Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706911260726981:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:21:07.759986Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706911260727032:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:08.549030Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706894080855107:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:08.549461Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:09.897785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:09.941014Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:09.986958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] >> KqpSinkLocks::OlapUpdateLocksTwoShards [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShard >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> TraverseColumnShard::TraverseColumnTable >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> KqpSinkTx::OlapInteractive >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert >> TraverseDatashard::TraverseOneTableServerless >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> AnalyzeDatashard::DropTableNavigateError >> KqpLocks::MixedTxFail-useSink [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::LocksAbortOnCommit >> AnalyzeColumnshard::AnalyzeRebootSa >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2026-01-10T12:19:29.029268Z node 1 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706488001028088:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:29.029917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:29.070012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:19:29.078484Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:19:29.352441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:19:29.399245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:29.399979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:29.404836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:29.442419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:29.446778Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706488001028049:2081] 1768047569028113 != 1768047569028116 2026-01-10T12:19:29.516537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:19:29.582799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002af4/r3tmp/yandexM3yrLr.tmp 2026-01-10T12:19:29.582823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002af4/r3tmp/yandexM3yrLr.tmp 2026-01-10T12:19:29.582989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002af4/r3tmp/yandexM3yrLr.tmp 2026-01-10T12:19:29.583058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:19:29.648862Z INFO: TTestServer started on Port 15645 GrpcPort 30110 PQClient connected to localhost:30110 2026-01-10T12:19:29.852771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:29.925137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:30.040201Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:32.018114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706500885930974:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:32.018120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706500885930985:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:32.018501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:32.018739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706500885930991:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:32.018787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:19:32.023452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:19:32.035978Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706500885930989:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:19:32.142524Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706500885931057:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:19:32.398792Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706500885931065:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:19:32.402501Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjIxZDc3Ni05MDAzNzdkMS01ZDIzM2ZlNi1jNTc4ZDNmOQ==, ActorId: [1:7593706500885930972:2329], ActorState: ExecuteState, LegacyTraceId: 01kekxhx1eb203st4kft7pgvfw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:19:32.407885Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:19:32.416221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.444399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:32.512992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706500885931345:2819] 2026-01-10T12:19:34.029573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706488001028088:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:34.029774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-10T12:19:38.669483Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2026-01-10T12:19:38.823142Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][] pipe [1:7593706526655735448:2988] connected; active server actors: 1 2026-01-10T12:19:38.823963Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:19:38.825366Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037894][streamImpl] BALANCER INIT DONE for streamImpl: (0, 72075186224037893) 2026-01-10T12:19:38.825758Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037894][streamImpl] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037894 2026-01-10T12:19:38.833021Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037894][streamImpl] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 1 2026-01-10T12:19:38.857749Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037892:1][1:7593706526655735508:2477] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:51:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:19:38.880596Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added pa ... Data. Partition 59. Read: {53, 10} (459-459) 2026-01-10T12:21:14.557166Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 11} (460-460) 2026-01-10T12:21:14.557193Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 12} (461-461) 2026-01-10T12:21:14.557220Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 13} (462-462) 2026-01-10T12:21:14.557250Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 14} (463-463) 2026-01-10T12:21:14.557277Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 15} (464-464) 2026-01-10T12:21:14.557303Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 16} (465-465) 2026-01-10T12:21:14.557330Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 17} (466-466) 2026-01-10T12:21:14.557359Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 18} (467-467) 2026-01-10T12:21:14.557388Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 19} (468-468) 2026-01-10T12:21:14.557416Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 20} (469-469) 2026-01-10T12:21:14.557444Z :DEBUG: [/Root] Take Data. Partition 59. Read: {53, 21} (470-470) 2026-01-10T12:21:14.557472Z :DEBUG: [/Root] Take Data. Partition 59. Read: {54, 0} (471-471) 2026-01-10T12:21:14.557498Z :DEBUG: [/Root] Take Data. Partition 59. Read: {54, 1} (472-472) 2026-01-10T12:21:14.557528Z :DEBUG: [/Root] Take Data. Partition 59. 
Read: {54, 2} (473-473) 2026-01-10T12:21:14.557554Z :DEBUG: [/Root] Take Data. Partition 59. Read: {54, 3} (474-474) 2026-01-10T12:21:14.557585Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 0} (475-475) 2026-01-10T12:21:14.557610Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 1} (476-476) 2026-01-10T12:21:14.557641Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 2} (477-477) 2026-01-10T12:21:14.557667Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 3} (478-478) 2026-01-10T12:21:14.557692Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 4} (479-479) 2026-01-10T12:21:14.557715Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 5} (480-480) 2026-01-10T12:21:14.557740Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 6} (481-481) 2026-01-10T12:21:14.557767Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 7} (482-482) 2026-01-10T12:21:14.557794Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 8} (483-483) 2026-01-10T12:21:14.557817Z :DEBUG: [/Root] Take Data. Partition 59. Read: {55, 9} (484-484) 2026-01-10T12:21:14.557839Z :DEBUG: [/Root] Take Data. Partition 59. Read: {56, 0} (485-485) 2026-01-10T12:21:14.557863Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 0} (486-486) 2026-01-10T12:21:14.557890Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 1} (487-487) 2026-01-10T12:21:14.557913Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 2} (488-488) 2026-01-10T12:21:14.557936Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 3} (489-489) 2026-01-10T12:21:14.557960Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 4} (490-490) 2026-01-10T12:21:14.557983Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 5} (491-491) 2026-01-10T12:21:14.558003Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 6} (492-492) 2026-01-10T12:21:14.558027Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 7} (493-493) 2026-01-10T12:21:14.558053Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 8} (494-494) 2026-01-10T12:21:14.558078Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 9} (495-495) 2026-01-10T12:21:14.558104Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 10} (496-496) 2026-01-10T12:21:14.558126Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 11} (497-497) 2026-01-10T12:21:14.558151Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 12} (498-498) 2026-01-10T12:21:14.558182Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 13} (499-499) 2026-01-10T12:21:14.558205Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 14} (500-500) 2026-01-10T12:21:14.558226Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 15} (501-501) 2026-01-10T12:21:14.558251Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 16} (502-502) 2026-01-10T12:21:14.558273Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 17} (503-503) 2026-01-10T12:21:14.558294Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 18} (504-504) 2026-01-10T12:21:14.558315Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 19} (505-505) 2026-01-10T12:21:14.558337Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 20} (506-506) 2026-01-10T12:21:14.558362Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 21} (507-507) 2026-01-10T12:21:14.558384Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 22} (508-508) 2026-01-10T12:21:14.558407Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 23} (509-509) 2026-01-10T12:21:14.558431Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 24} (510-510) 2026-01-10T12:21:14.558452Z :DEBUG: [/Root] Take Data. Partition 59. 
Read: {57, 25} (511-511) 2026-01-10T12:21:14.559208Z :DEBUG: [/Root] Take Data. Partition 59. Read: {57, 26} (512-512) 2026-01-10T12:21:14.559325Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 513, size 94498 bytes 2026-01-10T12:21:14.559364Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 471, size 86728 bytes 2026-01-10T12:21:14.559384Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 520, size 95779 bytes 2026-01-10T12:21:14.559403Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 480, size 88403 bytes 2026-01-10T12:21:14.559423Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 462, size 85052 bytes 2026-01-10T12:21:14.559447Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 500, size 92086 bytes 2026-01-10T12:21:14.559469Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 431, size 79355 bytes 2026-01-10T12:21:14.559488Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] The application data is transferred to the client. Number of messages 522, size 96167 bytes 2026-01-10T12:21:14.559530Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.561704Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.563994Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.567046Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.568527Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.569860Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.571488Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.572742Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:21:14.580448Z :INFO: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] Closing read session. 
Close timeout: 0.000000s 2026-01-10T12:21:14.580682Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:origin/feed/streamImpl:60:71:519:0 -:origin/feed/streamImpl:69:70:461:0 -:origin/feed/streamImpl:59:69:512:0 -:origin/feed/streamImpl:70:68:430:0 -:origin/feed/streamImpl:58:67:499:0 -:origin/feed/streamImpl:65:66:470:0 -:origin/feed/streamImpl:57:65:521:0 -:origin/feed/streamImpl:66:64:479:0 -:origin/feed/streamImpl:55:63:519:0 -:origin/feed/streamImpl:56:62:495:0 -:origin/feed/streamImpl:64:61:496:0 -:origin/feed/streamImpl:63:60:558:0 -:origin/feed/streamImpl:62:59:506:0 -:origin/feed/streamImpl:61:58:524:0 -:origin/feed/streamImpl:68:57:492:0 -:origin/feed/streamImpl:67:56:335:0 -:origin/feed/streamImpl:38:55:1682:0 -:origin/feed/streamImpl:40:54:1692:0 -:origin/feed/streamImpl:39:53:2364:0 -:origin/feed/streamImpl:37:52:2468:0 -:origin/feed/streamImpl:49:51:2399:0 -:origin/feed/streamImpl:54:50:2272:0 -:origin/feed/streamImpl:50:49:2393:0 -:origin/feed/streamImpl:53:48:2114:0 -:origin/feed/streamImpl:31:47:2524:0 -:origin/feed/streamImpl:33:46:2641:0 -:origin/feed/streamImpl:48:45:2384:0 -:origin/feed/streamImpl:47:44:2436:0 -:origin/feed/streamImpl:41:43:2513:0 -:origin/feed/streamImpl:35:42:2443:0 -:origin/feed/streamImpl:36:41:1791:0 -:origin/feed/streamImpl:32:40:1786:0 -:origin/feed/streamImpl:34:39:1748:0 -:origin/feed/streamImpl:51:38:2043:0 -:origin/feed/streamImpl:52:37:2298:0 -:origin/feed/streamImpl:42:36:2363:0 -:origin/feed/streamImpl:45:35:2373:0 -:origin/feed/streamImpl:46:34:1659:0 -:origin/feed/streamImpl:44:33:1657:0 -:origin/feed/streamImpl:43:32:2456:0 -:origin/feed/streamImpl:30:31:2558:0 -:origin/feed/streamImpl:29:30:2220:0 -:origin/feed/streamImpl:16:29:1935:0 -:origin/feed/streamImpl:28:28:2307:0 -:origin/feed/streamImpl:19:27:1711:0 -:origin/feed/streamImpl:23:26:1820:0 -:origin/feed/streamImpl:24:25:1767:0 -:origin/feed/streamImpl:15:24:1891:0 -:origin/feed/streamImpl:20:23:1722:0 -:origin/feed/streamImpl:21:21:1755:0 -:origin/feed/streamImpl:22:20:1700:0 -:origin/feed/streamImpl:18:19:1840:0 -:origin/feed/streamImpl:17:18:1742:0 -:origin/feed/streamImpl:26:17:1815:0 -:origin/feed/streamImpl:25:16:1792:0 -:origin/feed/streamImpl:14:15:1768:0 -:origin/feed/streamImpl:13:14:1779:0 -:origin/feed/streamImpl:12:13:1719:0 -:origin/feed/streamImpl:11:12:1712:0 -:origin/feed/streamImpl:10:9:1902:0 -:origin/feed/streamImpl:9:8:2100:0 -:origin/feed/streamImpl:5:7:1927:0 -:origin/feed/streamImpl:6:6:1712:0 -:origin/feed/streamImpl:3:5:1697:0 -:origin/feed/streamImpl:4:4:1738:0 -:origin/feed/streamImpl:8:11:2261:0 -:origin/feed/streamImpl:1:3:2641:0 -:origin/feed/streamImpl:27:22:1800:0 -:origin/feed/streamImpl:2:2:2045:0 -:origin/feed/streamImpl:7:10:2005:0 -:origin/feed/streamImpl:0:1:132233:0 2026-01-10T12:21:14.580729Z :INFO: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] Counters: { Errors: 0 CurrentSessionLifetimeMs: 23832 BytesRead: 46043648 MessagesRead: 250000 BytesReadCompressed: 46043648 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:21:14.580808Z :NOTICE: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:21:14.580848Z :DEBUG: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] [] Abort session to cluster 2026-01-10T12:21:14.581290Z :NOTICE: [/Root] [/Root] [9aa76c78-2275c7eb-32ac1490-9eb0f864] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> 2026-01-10T12:21:14.582058Z End 2026-01-10T12:21:14.590690Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][streamImpl] pipe [1:7593706848778301506:4299] disconnected. 2026-01-10T12:21:14.590756Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][streamImpl] pipe [1:7593706848778301506:4299] disconnected; active server actors: 1 2026-01-10T12:21:14.590781Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][streamImpl] pipe [1:7593706848778301506:4299] client consumer-1 disconnected session consumer-1_1_1_14230539292740072449_v1 >> KqpSinkTx::DeferredEffects [GOOD] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] >> TraverseColumnShard::TraverseServerlessColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 16378, MsgBus: 62542 2026-01-10T12:20:43.726044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804629354364:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.726222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.766815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:44.126295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.126405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.228399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.245005Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.245387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.374870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.374909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.374922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.375032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.436156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.728285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-10T12:20:44.851340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.149544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821809224400:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.149544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821809224412:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.149668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.149943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821809224415:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.149997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.154870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.166755Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706821809224414:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.313045Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706821809224467:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.654636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.753526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.726642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706804629354364:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.728605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:48.778454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 21871, MsgBus: 22504 2026-01-10T12:20:52.072730Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706844951923258:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:52.072782Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:52.136275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:52.221340Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:52.222703Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:52.222771Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:52.225696Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706844951923234:2081] 1768047652071566 != 1768047652071569 2026-01-10T12:20:52.256476Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:52.296629Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:52.408640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:52.408663Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:52.408671Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:52.408738Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.035957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:53.048488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:53.100233Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:55.976437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706857836825983:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.976522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706857836826009:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.976729Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.980413Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706857836826021:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.980521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.980669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/scheme ... 86224038036;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.403198Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.403259Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.403275Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.412054Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.412345Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.412367Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.414370Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.414416Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.414431Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.420941Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.421003Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.421020Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.422886Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.422934Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.422950Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.429743Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.429833Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:04.429853Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:07.184458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:07.184487Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 22581, MsgBus: 8850 2026-01-10T12:21:09.219001Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706919475554790:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:09.219054Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:09.246546Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:09.334819Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:09.347184Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:09.347266Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:09.367585Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:09.444065Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:09.444105Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:21:09.444114Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:09.444191Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:09.531273Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:09.970605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:09.976373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:21:10.228235Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:13.185832Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706936655424781:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.185937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.186079Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706936655424794:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.186433Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706936655424818:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.186568Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.190440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:13.206404Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706936655424821:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:13.299350Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706936655424875:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:13.363235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:13.413937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:14.320052Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706919475554790:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:14.339061Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:14.629263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1244, MsgBus: 7250 2026-01-10T12:20:43.823437Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804934826018:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.824356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.110730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.142103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.142250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.209473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.218412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804934825897:2081] 1768047643780583 != 1768047643780586 
2026-01-10T12:20:44.233288Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.418241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.436625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.436645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.436651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.436729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.827699Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.091907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.103811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:47.261198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706822114695978:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.261237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706822114695990:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.261298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.262267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706822114695992:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.262334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.266292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.280995Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706822114695993:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.409819Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706822114696045:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.802419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.940601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.807259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706804934826018:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.807310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.084565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 23452, MsgBus: 24004 2026-01-10T12:20:52.916948Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706846498555490:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:52.917003Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:52.967459Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.073091Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.125356Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.125434Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.137132Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.360180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:20:53.371014Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.371039Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.371046Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.371122Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.928325Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.116213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.128599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:56.571615Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706863678425507:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.571716Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.572001Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706863678425520:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.572045Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706863678425519:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.572097Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.580434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_p ... nternal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.202372Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.202852Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.202915Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.202931Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.210999Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.211087Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.211105Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.211895Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.211964Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.211980Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.219633Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.219696Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.219711Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.220752Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.220831Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.220845Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.227662Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.227724Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.227740Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.228874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.228925Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.228939Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.235823Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.235889Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.235903Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.236876Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.236937Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.236952Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.244465Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.244521Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.244536Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.245202Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.245245Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.245260Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.252983Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.253046Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.253061Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.253645Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.253692Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.253710Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.261447Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.261506Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.261521Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.262492Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.262538Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:13.262554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:15.993826Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:15.993869Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17735, MsgBus: 3164 2026-01-10T12:20:43.513141Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804730064427:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.513186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.880205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.886098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.886185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.975815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.065752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.067390Z 
node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804730064406:2081] 1768047643492904 != 1768047643492907 2026-01-10T12:20:44.154361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.261152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.261177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.261190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.261330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.567938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.861772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.878079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:44.941400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.144280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.332216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:45.423792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.385862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821909935463:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.385978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.386601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821909935473:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.386684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.842305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.877718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.949894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.987677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.029282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.084218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.140879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.205772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.291996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826204903646:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.292103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.292233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826204903651:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.292271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706826204903652:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.292319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.296531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.309534Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706826204903655:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:20:48.398488Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706826204903708:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.513769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706804730064427:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.513829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:51.052193Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706839089805962:2533], SessionActorId: [1:7593706834794838606:2533], stat ... thod=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.291201Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.291219Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.294242Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.294288Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.294301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.301043Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.301761Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.301781Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.302956Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.303006Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.303020Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.310657Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.310744Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.310762Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.318872Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.318914Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.318942Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.325647Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.325692Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.325708Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.333271Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.333334Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.333351Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.342889Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.342954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.342971Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.352674Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.352756Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.352775Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.353204Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.353251Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.353268Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.362794Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.362875Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.362894Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.364232Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.364290Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.364307Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.370862Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.370925Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.370945Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.373122Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.373183Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:16.373199Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:21:17.354747Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NmRiY2E1MWUtZDhmZjZmM2EtOGQ2MGZjODEtN2JhMmRkNTc=, ActorId: [4:7593706947671645750:2689], ActorState: ExecuteState, LegacyTraceId: 01kekxn3t84tprqt4gykpdhnx6, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/DataShard`" issue_code: 2001 severity: 1 } trace_id# 2026-01-10T12:21:17.358572Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976710670; |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.7%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 15898, MsgBus: 27692 2026-01-10T12:20:44.677400Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706811719533154:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.678432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:45.080160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:45.089398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:45.089524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:45.168460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:45.297811Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706811719533120:2081] 1768047644662349 != 1768047644662352 2026-01-10T12:20:45.303134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:45.314282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.352191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.352213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.352221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.352346Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.676861Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.830389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.898862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824604435895:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.899016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.899520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824604435907:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.899673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824604435909:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.899738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.900032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824604435912:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.900114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.904267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.915958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706824604435911:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:48.061516Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706828899403260:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.414612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.571053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.785747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706811719533154:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.791274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.855283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 29047, MsgBus: 11210 2026-01-10T12:20:53.273750Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706851355710408:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:53.326646Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:53.456183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.519604Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.533329Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706851355710356:2081] 1768047653265842 != 1768047653265845 2026-01-10T12:20:53.545803Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.545915Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.573255Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.793086Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:53.828683Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.828708Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.828716Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.828794Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:54.264449Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.401583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.413155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:56.993141Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706864240613127:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.993162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706864240613138:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.993208Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.993425Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706864240613142:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.993464Z ... PUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7593706914988658154:2964], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [3:7593706914988658130:2964]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[3:7593706914988658154:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:21:08.461871Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593706914988658148:2964], SessionActorId: [3:7593706914988658130:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7593706914988658130:2964]. 2026-01-10T12:21:08.462075Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Y2ZiYTkzNTItNDQ4OGI3YjktN2JhN2RlNTUtNzUzN2RjN2U=, ActorId: [3:7593706914988658130:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmv5cb1wd16js4kmd3w0n, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593706914988658183:2964] from: [3:7593706914988658148:2964] trace_id# 2026-01-10T12:21:08.462157Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706914988658183:2964] TxId: 281474976710666. Ctx: { TraceId: 01kekxmv5cb1wd16js4kmd3w0n, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2ZiYTkzNTItNDQ4OGI3YjktN2JhN2RlNTUtNzUzN2RjN2U=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:08.462373Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Y2ZiYTkzNTItNDQ4OGI3YjktN2JhN2RlNTUtNzUzN2RjN2U=, ActorId: [3:7593706914988658130:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmv5cb1wd16js4kmd3w0n, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 62927, MsgBus: 5192 2026-01-10T12:21:10.000443Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:10.000577Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:10.095122Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:10.095239Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:10.099969Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:10.108234Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706916931156865:2081] 1768047669933095 != 1768047669933098 2026-01-10T12:21:10.136465Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:10.271094Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:10.273254Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:10.273273Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:10.273282Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:10.273365Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:10.902578Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:10.912990Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:21:10.964756Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:14.109686Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706938405994209:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.109762Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706938405994222:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.109880Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.111931Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706938405994249:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.112030Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.113762Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:14.123645Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593706938405994248:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:14.200708Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593706938405994301:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:14.266883Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:14.335381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:15.490698Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:17.484225Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2026-01-10T12:21:17.484508Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037889 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:21:17.485921Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037889 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-10T12:21:17.486075Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [4:7593706951290904209:2963], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7593706951290904184:2963]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7593706951290904209:2963].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:21:17.486176Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7593706951290904203:2963], SessionActorId: [4:7593706951290904184:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7593706951290904184:2963]. 2026-01-10T12:21:17.486399Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ZjRhNTA1ODgtYTI4MTZjYTgtNzliYjdhZjItMjY4NmMxMjk=, ActorId: [4:7593706951290904184:2963], ActorState: ExecuteState, LegacyTraceId: 01kekxn3z1a9pchq0pccgqwc4f, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7593706951290904239:2963] from: [4:7593706951290904203:2963] trace_id# 2026-01-10T12:21:17.486486Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [4:7593706951290904239:2963] TxId: 281474976710666. Ctx: { TraceId: 01kekxn3z1a9pchq0pccgqwc4f, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZjRhNTA1ODgtYTI4MTZjYTgtNzliYjdhZjItMjY4NmMxMjk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:17.486774Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ZjRhNTA1ODgtYTI4MTZjYTgtNzliYjdhZjItMjY4NmMxMjk=, ActorId: [4:7593706951290904184:2963], ActorState: ExecuteState, LegacyTraceId: 01kekxn3z1a9pchq0pccgqwc4f, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |92.7%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 24957, MsgBus: 10817 2026-01-10T12:20:43.912492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706808648861020:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.913304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.271579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.271678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.278386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.390005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.390206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.391865Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706808648860900:2081] 1768047643894128 != 1768047643894131 2026-01-10T12:20:44.640563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.640578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.640584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.640639Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.665935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.914930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.135896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.148722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:47.364726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825828730970:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.364733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825828730979:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.364839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.365366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825828730985:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.365459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.369447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.382310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706825828730984:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.538269Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706825828731037:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.877129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:48.065844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.066135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.066402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.066529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.066628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.066731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.066845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.066952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.067057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:48.067159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:48.067248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:48.067345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:48.067471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706825828731210:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:48.109250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.109335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.109534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.109652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.109788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.109906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.110045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.110198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706825828731212:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.110300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... GFmZGM4YmE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:09.261405Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NDA2NmEyYjEtY2YyMmI1ZDAtNTJjM2M3NDAtZGFmZGM4YmE=, ActorId: [2:7593706912083953951:2967], ActorState: ExecuteState, LegacyTraceId: 01kekxmvz06bjj2pq7b1wse5jn, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:21:09.261684Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261707Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261720Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261735Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261748Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261760Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261774Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.261788Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262206Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262241Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262253Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262264Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262276Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262289Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262300Z node 2 
:KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262310Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262321Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262331Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262345Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:09.262357Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7593706916378921856:2967], SessionActorId: [2:7593706912083953951:2967], StateRollback: unknown message 278003713 Trying to start YDB, gRPC: 32531, MsgBus: 14997 2026-01-10T12:21:10.818225Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706921140790723:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:10.818341Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:10.852154Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:10.975344Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:10.975431Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:10.982180Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:10.987831Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:21:10.988530Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:21:11.000254Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:11.020677Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:11.020706Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:11.020717Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:11.020812Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:11.115019Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:11.551584Z 
node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:11.872188Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:14.568825Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938320660770:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.568828Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938320660762:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.568897Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.569132Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938320660777:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.569185Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.572294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:14.582428Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706938320660776:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:14.643149Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706938320660829:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:14.708503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:14.760907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:15.936645Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706921140790723:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:15.938719Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:16.256599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 19089, MsgBus: 21010 2026-01-10T12:20:43.143197Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706805238454254:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.143319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.426407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.465676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.465807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.508201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.558177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.564225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7593706805238454204:2081] 1768047643138081 != 1768047643138084 2026-01-10T12:20:43.684183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.704922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.704945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.704952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.705046Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.154882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.244533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.252248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.668688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818123356993:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.668713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818123356982:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.668848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.670921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706818123356997:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.671057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.672991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.684147Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706818123356996:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:46.802979Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706818123357049:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.046008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.148311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.119712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.143767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706805238454254:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.143841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:50.552786Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710670; 2026-01-10T12:20:50.579698Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7593706835303234441:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:7593706831008266832:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:7593706835303234441:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:20:50.583390Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7593706831008266832:2967]. 2026-01-10T12:20:50.583596Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583611Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583623Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583641Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583653Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583665Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583676Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583689Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583699Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583711Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583723Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583735Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583746Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583757Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583768Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583779Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583791Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: 
[1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583803Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7593706835303234435:2967], SessionActorId: [1:7593706831008266832:2967], StateRollback: unknown message 278003713 2026-01-10T12:20:50.583814Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1: ... _COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710673; 2026-01-10T12:21:07.562363Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710673; 2026-01-10T12:21:07.562364Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710673; Trying to start YDB, gRPC: 17142, MsgBus: 65332 2026-01-10T12:21:10.320631Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:10.325614Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:10.414308Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:10.426043Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:10.426141Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:10.447009Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:10.519179Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:10.524635Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:10.524655Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:10.524663Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:10.524740Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:10.979074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:11.300703Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:14.177182Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938012345719:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.177495Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.177788Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938012345747:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.182712Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:14.183143Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938012345759:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.183234Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.187876Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706938012345778:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.188298Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:14.198564Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706938012345749:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:14.303491Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706938012345804:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:14.357784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:14.534111Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:15.833114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:18.032966Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2026-01-10T12:21:18.053974Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7593706955192223663:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [3:7593706950897255749:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[3:7593706955192223663:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:21:18.054100Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7593706950897255749:2967]. 2026-01-10T12:21:18.054320Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054341Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054352Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054365Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054377Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054460Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YjQyYjFmOTAtZTcwZjg3ODYtMzBlOGY1ZmQtZmM5NmJkMWQ=, ActorId: [3:7593706950897255749:2967], ActorState: ExecuteState, LegacyTraceId: 01kekxn4hsbsz4knc2q0hvnbkk, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593706955192223658:2967] from: [3:7593706955192223657:2967] trace_id# 2026-01-10T12:21:18.054554Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706955192223658:2967] TxId: 281474976715668. Ctx: { TraceId: 01kekxn4hsbsz4knc2q0hvnbkk, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YjQyYjFmOTAtZTcwZjg3ODYtMzBlOGY1ZmQtZmM5NmJkMWQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:18.054682Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.054907Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjQyYjFmOTAtZTcwZjg3ODYtMzBlOGY1ZmQtZmM5NmJkMWQ=, ActorId: [3:7593706950897255749:2967], ActorState: ExecuteState, LegacyTraceId: 01kekxn4hsbsz4knc2q0hvnbkk, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:21:18.055250Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.055266Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.055277Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.055289Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 2026-01-10T12:21:18.055302Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7593706955192223657:2967], SessionActorId: [3:7593706950897255749:2967], StateRollback: unknown message 278003713 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TraverseDatashard::TraverseOneTable >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] Test command err: Trying to start YDB, gRPC: 23299, MsgBus: 18688 2026-01-10T12:20:43.670003Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804653645969:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.670066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:20:44.124977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.134538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.134641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.228725Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804653645928:2081] 1768047643668243 != 1768047643668246 2026-01-10T12:20:44.239294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.243034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.424740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.436762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.436778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.436784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.436844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.686597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.207425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.358339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821833516004:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.358351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821833516018:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.358410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.358636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821833516023:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.358679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.361946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.370973Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706821833516022:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.518788Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706821833516075:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.785974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.918484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.737996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706804653645969:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.738784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.119596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 28523, MsgBus: 28383 2026-01-10T12:20:52.909316Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:52.909449Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:53.080238Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706843901463929:2081] 1768047652799545 != 1768047652799548 2026-01-10T12:20:53.096534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.096622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.103476Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.193278Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.256622Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:20:53.277913Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:20:53.278244Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:20:53.308389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.308408Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.308415Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.308483Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.576579Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:53.876388Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.119803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.132742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:56.648686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861081333978:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.648869Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.649316Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861081334013:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.649352Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861081334014:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.649387Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.653518Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: ... source pool default not found or you don't have access permissions } 2026-01-10T12:21:04.528184Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706895984085548:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.528272Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:04.541322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:04.558855Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706895984085546:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:04.619593Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706895984085600:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:04.701791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:04.752762Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.911637Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706878804215523:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:05.930375Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:06.014530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:10.291748Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593706921753897416:2964], SessionActorId: [3:7593706908868995451:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[3:7593706908868995451:2964]. 2026-01-10T12:21:10.291969Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=NjVkMWM3NDQtNTU5NWMwNGUtMjFiNmFjOWItNTUzYzQ3NzQ=, ActorId: [3:7593706908868995451:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmwnb9nfhcvfvz59t9xnc, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593706921753897417:2964] from: [3:7593706921753897416:2964] trace_id# 2026-01-10T12:21:10.292034Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706921753897417:2964] TxId: 281474976710666. Ctx: { TraceId: 01kekxmwnb9nfhcvfvz59t9xnc, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NjVkMWM3NDQtNTU5NWMwNGUtMjFiNmFjOWItNTUzYzQ3NzQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-10T12:21:10.292279Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NjVkMWM3NDQtNTU5NWMwNGUtMjFiNmFjOWItNTUzYzQ3NzQ=, ActorId: [3:7593706908868995451:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmwnb9nfhcvfvz59t9xnc, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 29714, MsgBus: 12156 2026-01-10T12:21:11.700562Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706924821962005:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:11.710381Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:11.725705Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:11.812306Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:11.822829Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:11.822935Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:11.843409Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:11.915383Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:11.932412Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:11.932442Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:11.932450Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:11.932534Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:12.487818Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:12.500813Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:21:12.697801Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:15.738308Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706942001831938:2324], DatabaseId: 
/Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:15.738307Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706942001831948:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:15.738396Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:15.740114Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706942001831955:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:15.740213Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:15.743083Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:15.760819Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593706942001831954:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:15.860292Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593706942001832007:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:15.929304Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:16.043015Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:16.993795Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593706924821962005:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:17.016490Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:17.327490Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation >> AnalyzeColumnshard::AnalyzeEmptyTable >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> TxUsage::WriteToTopic_Demo_42_Query >> AnalyzeColumnshard::AnalyzeSameOperationId >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert >> AnalyzeColumnshard::AnalyzeStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] Test command err: Trying to start YDB, gRPC: 8029, MsgBus: 4512 ... waiting for SysViewsRoster update finished 2026-01-10T12:11:56.319152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:11:56.447802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:11:56.456688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:11:56.457140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:11:56.457251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:11:56.873905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:11:56.874053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:11:56.881470Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047112442040 != 1768047112442043 2026-01-10T12:11:56.894973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:11:56.949376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:11:57.106769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:11:57.467590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:11:57.467663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:11:57.467693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:11:57.468308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:11:57.481746Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:11:57.961722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:11:58.011990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.012122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.012206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.013289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.013495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:11:58.017646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:11:58.098157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:932:2791], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:11:58.174122Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:11:58.660136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:11:59.225129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:12:03.226881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 17106, MsgBus: 17188 ... waiting for SysViewsRoster update finished 2026-01-10T12:12:13.569395Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:12:13.581967Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:12:13.582352Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:12:13.582421Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:12:13.961976Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:12:13.962134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:12:13.999560Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047129245368 != 1768047129245372 2026-01-10T12:12:14.006241Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:12:14.056616Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:12:14.195483Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:12:14.750767Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:12:14.750835Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:12:14.750870Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:12:14.751278Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:12:14.763845Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:12:15.416201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:12:15.459453Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:915:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.459563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.459987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.464265Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.464413Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:12:15.478859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281 ... erver/server_builder.cc:396:5 #10 0x0000212ed5e9 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #11 0x00003ff41445 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #12 0x00003ff45817 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #13 0x00004e61ed08 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #14 0x00001b4f7e99 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #15 0x00001b4f7e99 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #16 0x00001b6577e4 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #17 0x00001b61ed17 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #18 0x00001b61ed17 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b61ed17 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b61ed17 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b61ed17 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001c062879 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001c0308e7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b61e06c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #27 0x00001c03209f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001c05c6dc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7f9390100d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87a0fd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce5a99 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce5a99 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce5a99 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce5a99 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce5a99 in 
__split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021ce5a99 in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021ce5a99 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1569:13 #8 0x000021ce5a99 in grpc_core::Server::Start() /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:702:17 #9 0x000021cf4bbe in grpc_server_start /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1715:37 #10 0x000021d19367 in grpc::Server::Start(grpc::ServerCompletionQueue**, unsigned long) /-S/contrib/libs/grpc/src/cpp/server/server_cc.cc:1214:3 #11 0x0000212fe034 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:445:11 #12 0x0000212ed5e9 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #13 0x00003ff41445 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003ff45817 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e61ed08 in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b4f7e99 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #17 0x00001b4f7e99 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #18 0x00001b6577e4 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #19 0x00001b61ed17 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #20 0x00001b61ed17 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #21 0x00001b61ed17 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #22 0x00001b61ed17 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #23 0x00001b61ed17 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #24 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #25 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #26 0x00001c062879 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #27 0x00001c0308e7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x00001b61e06c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #29 0x00001c03209f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x00001c05c6dc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #31 0x7f9390100d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b87a0fd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021369c91 in 
grpc_core::internal::StatusAllocHeapPtr(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/gprpp/status_helper.cc:427:25 #2 0x00002149ed72 in grpc_core::CallCombiner::Cancel(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/call_combiner.cc:233:25 #3 0x00002144556e in grpc_core::FilterStackCall::CancelWithError(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1037:18 #4 0x000021440c8c in grpc_core::Call::CancelWithStatus(grpc_status_code, char const*) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:366:3 #5 0x000021462d93 in grpc_call_cancel_with_status /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3499:30 #6 0x000021d428b6 in grpc::ServerContextBase::TryCancel() const /-S/contrib/libs/grpc/src/cpp/server/server_context.cc:347:7 #7 0x0000212ea24c in NYdbGrpc::TGrpcServiceProtectiable::StopService() /-S/ydb/library/grpc/server/grpc_server.cpp:64:26 #8 0x0000212f0a5e in NYdbGrpc::TGRpcServer::Stop() /-S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b39c306 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b39c306 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b39c306 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b65782c in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:70:5 #13 0x00001b65782c in reset /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:286:7 #14 0x00001b65782c in ~unique_ptr /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:255:71 #15 0x00001b65782c in ~TTableDataModificationTester /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:15:7 #16 0x00001b65782c in ~TIsolationSetting /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:391:11 #17 0x00001b65782c in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:510:9 #18 0x00001b61ed17 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #19 0x00001b61ed17 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #20 0x00001b61ed17 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #21 0x00001b61ed17 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #22 0x00001b61ed17 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #23 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #24 0x00001c062879 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #25 0x00001c062879 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #26 0x00001c0308e7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #27 0x00001b61e06c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #28 0x00001c03209f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #29 0x00001c05c6dc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #30 0x7f9390100d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 
4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 429042 byte(s) leaked in 5259 allocation(s). |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> AnalyzeDatashard::AnalyzeOneTable >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table >> AnalyzeDatashard::AnalyzeTwoTables >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert >> TxUsage::WriteToTopic_Demo_13_Table [GOOD] >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] >> TxUsage::WriteToTopic_Demo_13_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 27545, MsgBus: 3022 2026-01-10T12:20:43.013118Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804530144501:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.013235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.221786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.245204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.245337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.281662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.316270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.320181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804530144477:2081] 1768047643012317 != 1768047643012320 2026-01-10T12:20:43.390073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.432880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.432899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.432906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.432967Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:43.934734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:43.947645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:44.028263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:46.210497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817415047252:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.210502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817415047262:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.210630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.210923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817415047267:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.211007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.214564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.224718Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706817415047266:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:46.334855Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706817415047319:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:46.578056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:46.702525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:46.702741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:46.702937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:46.703044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:46.703141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:46.703231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:46.703372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:46.703515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:46.703645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:46.703748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:46.703907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:46.704016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:46.704128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706817415047488:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:46.718186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:46.718305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:46.718486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:46.718598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:46.718708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:46.718843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:46.718970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:46.719078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706817415047489:2340];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:46.719193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
-10T12:21:09.556191Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:09.556285Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:10.077980Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:10.296680Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:13.325663Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706935229859788:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.325724Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706935229859805:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.325780Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.328508Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706935229859811:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.328633Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:13.329525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:13.341811Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706935229859810:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:13.414171Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706935229859863:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:13.475379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:13.538745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:14.549847Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706918049989899:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:14.551866Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:14.857537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 24596, MsgBus: 2517 2026-01-10T12:21:18.256745Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706956062648801:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:18.257398Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:18.292899Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:18.392981Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:18.394750Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706956062648771:2081] 1768047678249822 != 1768047678249825 2026-01-10T12:21:18.422345Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:18.422444Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:18.440753Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:18.505565Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:18.516739Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:18.516763Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:18.516771Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:18.516861Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:19.211425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:19.265722Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:22.222292Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706973242518849:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:22.222293Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706973242518841:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:22.222395Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:22.222754Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593706973242518856:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:22.222833Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:22.226031Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:22.236286Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593706973242518855:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:22.302056Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593706973242518910:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:22.360846Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:22.400555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:23.252812Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593706956062648801:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:23.252884Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:23.445525Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 24210, MsgBus: 1720 2026-01-10T12:20:43.367852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807984793815:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.367915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.668934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.673463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.673571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.724275Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.814156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.976203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.997523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.997547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.997562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.997651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.375700Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.586382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.594646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.863473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820869696551:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.863491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820869696522:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.863603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.863893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820869696561:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.863951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.866914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.882674Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706820869696560:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:20:47.013338Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706820869696613:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.342428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:47.493415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.493709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.493971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.494200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.494333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.494445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.494446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.494520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.494570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.494682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.494817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.494914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.495008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:47.495132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:47.495215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.495260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7593706825164664081:2341];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:47.495371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.495478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.495592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.495712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.495837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.495944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7593706825164664090:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Restor ... n type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:05.009661Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:929:2788], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:05.048010Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:986:2826] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:05.117192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.384298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:08.271908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:12.167156Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=281474976715669; 2026-01-10T12:21:12.181168Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 281474976715669 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:21:12.181492Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:21:12.182039Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [2:8993:8422], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [2:8762:8422]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[2:8993:8422].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2026-01-10T12:21:12.182688Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:8784:8422], SessionActorId: [2:8762:8422], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[2:8762:8422]. 2026-01-10T12:21:12.183220Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YWRmZmFmOGItZDRhYmY2ZGQtNDkwNmRlZTEtNDAzZTc0ZTc=, ActorId: [2:8762:8422], ActorState: ExecuteState, LegacyTraceId: 01kekxmyb95zcqmxh6v76xnzdb, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:8988:8422] from: [2:8784:8422] trace_id# 2026-01-10T12:21:12.183478Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:8988:8422] TxId: 281474976715665. Ctx: { TraceId: 01kekxmyb95zcqmxh6v76xnzdb, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWRmZmFmOGItZDRhYmY2ZGQtNDkwNmRlZTEtNDAzZTc0ZTc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-10T12:21:12.183961Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YWRmZmFmOGItZDRhYmY2ZGQtNDkwNmRlZTEtNDAzZTc0ZTc=, ActorId: [2:8762:8422], ActorState: ExecuteState, LegacyTraceId: 01kekxmyb95zcqmxh6v76xnzdb, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 20101, MsgBus: 27822 ... waiting for SysViewsRoster update finished 2026-01-10T12:21:17.608049Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:17.616725Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:17.621917Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:17.622237Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:17.622517Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:17.927253Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:17.927378Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:17.957349Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1768047673630237 != 1768047673630241 2026-01-10T12:21:17.964128Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:18.012056Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:18.115774Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:18.532365Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:18.532442Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:18.532499Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:18.533295Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:18.550644Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:18.994550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:19.039155Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:910:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:19.039273Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:921:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:19.039360Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:19.040569Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:19.040783Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:19.046011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:19.144970Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:924:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:19.170565Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:981:2821] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:19.236668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:19.474836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:22.347788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 2382, MsgBus: 23842 2026-01-10T12:20:44.658542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706812742675884:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.661286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.738761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:45.072915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:45.095155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:45.095266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:45.152900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:45.155790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:45.316703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.316729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.316737Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.316828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.403977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.680226Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.780731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:48.224386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829922545911:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.224510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829922545900:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.224958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.229401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829922545915:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.229489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.230146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.243076Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706829922545914:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:20:48.381902Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706829922545967:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.707535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:48.979796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.982473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.982703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.982830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.982936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.983065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.983174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.983287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.983413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:48.983548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:48.983658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:48.983781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:48.983900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706829922546142:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:48.988284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.988369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.988555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.988635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.988740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.988856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.988946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.989068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.989166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7593706829922546150:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norma ... 
state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:14.484609Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:14.484630Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:14.520032Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxmmk31jdz8nwxj3azg6b3", SessionId: ydb://session/3?node_id=2&id=NWY1YmFhMjUtYzVhN2NiMzMtN2ZkMjE1YWUtODBmNGY2NDY=, Slow query, duration: 10.098553s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-10T12:21:15.944516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:15.944545Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:17.209946Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Mzg0ZTU5MjAtZGZhZWY2NDctNjg5NDYwYmMtOGI4YzdkMjY=, ActorId: [2:7593706941776118363:3647], ActorState: ReadyState, LegacyTraceId: 01kekxn3rea8f0h8qhshr7081n, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01kekxn1kd1j75dteb3j4ta7y2" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 17280, MsgBus: 14030 2026-01-10T12:21:19.266776Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593706962117394124:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:19.266829Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:19.290760Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:19.416193Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:19.417440Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:19.417530Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:19.437127Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593706962117394085:2081] 1768047679265211 != 1768047679265214 2026-01-10T12:21:19.444407Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:19.457525Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:19.515539Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:19.515568Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:19.515576Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:19.515651Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:19.598915Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:20.016469Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:20.274631Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:23.252240Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706979297264153:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:23.252336Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:23.252372Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706979297264165:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:23.252561Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706979297264167:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:23.252609Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:23.257315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:23.270244Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706979297264168:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:23.329125Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706979297264222:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:23.395457Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:23.465775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:24.324368Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706962117394124:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:24.326713Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:24.382122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:26.397306Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593706992182174412:2964], SessionActorId: [3:7593706987887206842:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 . sessionActorId=[3:7593706987887206842:2964]. 2026-01-10T12:21:26.397536Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=N2Q4MDYwMzAtN2ZhODU0YWEtN2I5ODgwODAtMjIxYjgxZjU=, ActorId: [3:7593706987887206842:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxncqr34g43a7pp95me4z8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593706992182174413:2964] from: [3:7593706992182174412:2964] trace_id# 2026-01-10T12:21:26.397665Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706992182174413:2964] TxId: 281474976715667. Ctx: { TraceId: 01kekxncqr34g43a7pp95me4z8, Database: /Root, SessionId: ydb://session/3?node_id=3&id=N2Q4MDYwMzAtN2ZhODU0YWEtN2I5ODgwODAtMjIxYjgxZjU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } trace_id# 2026-01-10T12:21:26.397976Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=N2Q4MDYwMzAtN2ZhODU0YWEtN2I5ODgwODAtMjIxYjgxZjU=, ActorId: [3:7593706987887206842:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxncqr34g43a7pp95me4z8, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> Describe::LocationWithKillTablets [GOOD] >> Describe::DescribePartitionPermissions >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] >> TraverseDatashard::TraverseTwoTables >> TxUsage::WriteToTopic_Demo_11_Table [GOOD] >> TraverseDatashard::TraverseOneTableServerless [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] >> TxUsage::WriteToTopic_Demo_11_Query >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [GOOD] >> TraverseDatashard::TraverseOneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:21.664008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:21.781755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:21.790975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:21.791423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:21.791506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:22.188261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:22.283559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:22.283671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:22.317946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:22.396837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:23.080591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:23.081737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:23.081787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:23.081812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:23.081929Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:23.148029Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:23.708256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:26.806846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:26.816309Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:26.820602Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:26.855805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:26.855967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:26.896098Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:26.898210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.087475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.087661Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.089057Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.089741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.090567Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.091259Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.091642Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.091779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.092000Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.092256Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.092396Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.108450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.380729Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.416719Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:27.416849Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:27.468468Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:27.470426Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:27.470681Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:27.470747Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:27.470804Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:27.470866Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:27.470927Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:27.470986Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:27.471829Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:27.472692Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2453] 2026-01-10T12:21:27.476755Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:21:27.488658Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Describe result: PathErrorUnknown 2026-01-10T12:21:27.488726Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Creating table 2026-01-10T12:21:27.488814Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:27.490901Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.490986Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2145:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.523905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2217:2646] 2026-01-10T12:21:27.524230Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2217:2646], schemeshard id = 72075186224037897 2026-01-10T12:21:27.556064Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2248:2657], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:27.578550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:27.599806Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:27.599946Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:27.609535Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:27.812710Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:27.853763Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:21:28.220392Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:28.357192Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:28.357265Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Column diff is empty, finishing 2026-01-10T12:21:29.117147Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:29.155726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:29.798877Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:29.853007Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8275: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2026-01-10T12:21:29.853073Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8291: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2026-01-10T12:21:29.853169Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:3073:3127], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2026-01-10T12:21:29.858597Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3078:3129] 2026-01-10T12:21:29.859455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3078:3129], schemeshard id = 72075186224037899 2026-01-10T12:21:31.389053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3418:3576], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.389235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.389790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3436:3581], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.389912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.409837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:31.832911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3718:3627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.833056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.953153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3721:3629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.953265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.954306Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3725:3633]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:31.954488Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:31.954716Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:21:31.954764Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3728:3636] 2026-01-10T12:21:31.954841Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3728:3636] 2026-01-10T12:21:31.955317Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3729:3542] 2026-01-10T12:21:31.955485Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3728:3636], server id = [2:3729:3542], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:31.955747Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3729:3542], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:31.955800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:31.955986Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:31.956049Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3725:3633], StatRequests.size() = 1 2026-01-10T12:21:32.000859Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:32.001672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3733:3640], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.001837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.002347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3737:3644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.002426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.002650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3740:3647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.011533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:32.127153Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:32.127234Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:32.169734Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3728:3636], schemeshard count = 1 2026-01-10T12:21:32.507692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3742:3649], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:21:32.783520Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3853:3715] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:32.800973Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3876:3731]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:32.801164Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:32.801202Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3876:3731], StatRequests.size() = 1 2026-01-10T12:21:33.135645Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3917:3587]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:33.138482Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:33.138550Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:33.139077Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:33.139125Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:21:33.139179Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:33.174022Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:33.185720Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:21:33.186263Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-10T12:21:33.186434Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:33.186573Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShard [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query >> KqpSinkTx::OlapInteractive [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:24.769807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:24.855605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:24.864168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:24.864518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:24.864578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:25.234645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:25.340036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:25.340190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:25.374296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:25.460711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:26.172487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:26.173948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:26.173998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:26.174032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:26.174181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:26.241537Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:26.801688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:30.187057Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:30.195308Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:30.198253Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:30.230107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:30.230250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.259285Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:30.261601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.454188Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:30.454299Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.455838Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.456374Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.456974Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.457834Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.458175Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.458442Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.458606Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.458881Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.458998Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.474922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.676367Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:30.721230Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:30.721341Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:30.759501Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:30.761164Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:30.761422Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:30.761486Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:30.761540Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:30.761601Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:30.761664Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:30.761719Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:30.762417Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:30.787592Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:30.787711Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:30.811680Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:30.812555Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:30.868664Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:30.872828Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:30.899985Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:30.900068Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:30.900184Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:30.913050Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:30.938764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:30.964535Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:30.964679Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:30.982343Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:31.169107Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:31.184088Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:31.521405Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:31.661793Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:31.661892Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:32.363294Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:32.561956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2731:3368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.562181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.562801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2749:3373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.562896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.582144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.067391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3034:3422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.067656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.141725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3038:3425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.141921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.143059Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3041:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:33.143286Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:33.143401Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3043:3430] 2026-01-10T12:21:33.143481Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3043:3430] 2026-01-10T12:21:33.144116Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3044:3174] 2026-01-10T12:21:33.144382Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3043:3430], server id = [2:3044:3174], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:33.144635Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3044:3174], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:33.144718Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:33.144980Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:33.145060Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3041:3428], StatRequests.size() = 1 2026-01-10T12:21:33.166907Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:33.167427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.167713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.168360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3052:3438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.168463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.168697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3055:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.176902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:33.269743Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:33.269825Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:33.357695Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3043:3430], schemeshard count = 1 2026-01-10T12:21:33.609080Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3057:3443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:21:33.775752Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3160:3510] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:33.791060Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3183:3526]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:33.791278Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:33.791321Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3183:3526], StatRequests.size() = 1 2026-01-10T12:21:33.922577Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3231:3215]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:33.925258Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:33.925317Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:33.925860Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:33.925904Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:33.925953Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:33.935901Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:33.941530Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:21:33.941864Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-10T12:21:33.942082Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:33.942164Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] Test command err: Trying to start YDB, gRPC: 18772, MsgBus: 2079 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:46.043468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:46.171890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:46.183109Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:46.183606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:46.183757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:46.550516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:46.550675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:46.557766Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047643179341 != 1768047643179344 2026-01-10T12:20:46.571579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:46.620655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:46.746938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:47.103304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:47.103405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:47.103450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:47.103979Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:47.116470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:47.443533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.541298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.831906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.269249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.576895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.611202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.611657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.612536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.612633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.644046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.835946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.145034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.487338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.772553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.065427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.386662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.763428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.139361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.139502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.139862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.139961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.140012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.145485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:52.331283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:52.407431Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.338289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table. ... 50545Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:25.850632Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:26.151797Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:26.151985Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:26.177398Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1768047682117745 != 1768047682117748 2026-01-10T12:21:26.183737Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:26.230236Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:26.335354Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:26.686753Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:26.686812Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:26.686857Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:26.687174Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:26.699640Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:27.166814Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:27.235199Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:27.471112Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:27.859597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:28.141654Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:28.671097Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1905:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:28.671641Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:28.672429Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1979:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:28.672534Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:28.723601Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:28.936222Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:29.214028Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:29.456943Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:29.691267Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:29.948617Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:30.207556Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:30.506614Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:30.871360Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2791:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:30.871517Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:30.872063Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:30.872178Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:30.872272Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:30.880240Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:31.056207Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2800:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:21:31.115029Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2860:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:32.932572Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.179172Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.493871Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] Test command err: Trying to start YDB, gRPC: 19423, MsgBus: 22954 2026-01-10T12:20:43.164322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706808707729061:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.164655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.427118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.427256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.448779Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.473775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.478887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.481444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706808707728953:2081] 1768047643148613 != 1768047643148616 2026-01-10T12:20:43.639718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.657910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.664138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.664165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.664267Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.163337Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.183916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.202510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.433634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821592631736:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.433633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821592631715:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.433724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.433986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821592631747:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.434026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.437386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.447045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706821592631746:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:46.530039Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706821592631799:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:46.823978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:46.929785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.892333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.553642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706808707729061:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.651086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2015, MsgBus: 2840 2026-01-10T12:20:51.185561Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:51.185712Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:51.291515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:51.291793Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:51.313363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:51.321200Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:51.324354Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706841710176605:2081] 1768047651020859 != 1768047651020862 2026-01-10T12:20:51.476255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:20:51.501688Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:51.501707Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:51.501715Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:51.501823Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:52.057698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:52.080154Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.883342Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706854595079356:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.883399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706854595079389:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.883483Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.884140Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706854595079396:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.884264Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.886945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.914348Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593706854595079394:2327], DatabaseId: /Root, PoolId: default, ... Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.785399Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.785465Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.785501Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.792746Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.792830Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.792865Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.801323Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.801413Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.801452Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.808472Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.808551Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.808590Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.815579Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.815644Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.815676Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.822427Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.822513Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.822554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.829878Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.829959Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.829997Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.838126Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.838205Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.838243Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.846965Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.847051Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.847090Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.854728Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.854816Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.854855Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:30.981694Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxmzc7e346j372pvw95svt", SessionId: ydb://session/3?node_id=3&id=ZWM1MzI0YTQtYzAzMTcyMWYtOTk2OTBkMjUtNDkwNDZmYzc=, Slow query, duration: 18.264683s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-10T12:21:32.877108Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;commit_tx_id=281474976710667;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2026-01-10T12:21:32.966179Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [3:9778:9134], TxId: 
281474976710665, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxnjf1bh8a8zrc5c91fw59. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YmMyYjZlODQtZTk3Y2QxM2UtMTBhZGEwYzgtNTM0OTc2NDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-10T12:21:32.967200Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:9778:9134], TxId: 281474976710665, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxnjf1bh8a8zrc5c91fw59. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YmMyYjZlODQtZTk3Y2QxM2UtMTBhZGEwYzgtNTM0OTc2NDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-10T12:21:32.968839Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YmMyYjZlODQtZTk3Y2QxM2UtMTBhZGEwYzgtNTM0OTc2NDY=, ActorId: [3:9532:8893], ActorState: ExecuteState, LegacyTraceId: 01kekxnjf1bh8a8zrc5c91fw59, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=14;stage=FO::FETCHING; |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 11644, MsgBus: 26972 2026-01-10T12:20:43.320426Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807609758708:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.320730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.606660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.606775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:43.645863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:43.681285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.708353Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:43.712422Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706807609758593:2081] 1768047643312935 != 1768047643312938 2026-01-10T12:20:43.866733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:43.939982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:43.940007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:43.940015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:43.940098Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.336579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.527430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.540680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.759482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820494661363:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.759486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820494661379:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.759597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.759949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706820494661383:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.760050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.763348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.772782Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706820494661382:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:46.873689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706820494661435:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.138341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.264700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.245601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.348809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706807609758708:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.349148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12989, MsgBus: 9093 2026-01-10T12:20:52.340179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:52.350622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:52.360245Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706846846100085:2081] 1768047652181603 != 1768047652181606 2026-01-10T12:20:52.428379Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:52.429546Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:52.429622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:52.444741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:52.568890Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
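For readability, the DDL carried in the KQP_SLOW_LOG entry earlier in this log (the text: field of the 18.264683s slow query) is reproduced below with its \n escapes expanded. It is taken verbatim from the log text, not from the test sources, and it is the column-store schema against which the lock-invalidation errors in this section are reported:

    CREATE TABLE `/Root/Test` (
        Group Uint32 not null,
        Name String not null,
        Amount Uint64,
        Comment String,
        PRIMARY KEY (Group, Name)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10
    );

    CREATE TABLE `/Root/KV` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );

    CREATE TABLE `/Root/KV2` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );
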
2026-01-10T12:20:52.624675Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:52.624697Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:52.624709Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:52.624784Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.276261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:53.397608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:53.405234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:55.846267Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706859731002848:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.846331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706859731002833:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.846431Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.846895Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706859731002871:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.846987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.848571Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706859731002877:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:55.848697Z node 2 :KQP_WORKLOAD_SERVICE ... log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.982876Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.982895Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.989522Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.989583Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.989600Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.992584Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.992646Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.992691Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.998681Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.998745Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.998757Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.000936Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.001000Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.001016Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.007134Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.007208Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.007226Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.008029Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.008068Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.008096Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.015547Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.015629Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.015661Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.016850Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.016891Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.016904Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.023782Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.023848Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.023864Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.024242Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.024282Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.024296Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031778Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031825Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031837Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031899Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031951Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:32.031975Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.604446Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:33.604497Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:34.036215Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;commit_tx_id=281474976715668;commit_lock_id=281474976715665;fline=manager.cpp:80;broken_lock_id=281474976715666; 
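The ABORTED / KIKIMR_LOCKS_INVALIDATED errors that follow come from YDB's optimistic locking: a commit is rejected when another transaction has already committed a conflicting write to a key touched under the still-open transaction (the broken_lock_id warning from the columnshard above marks that invalidation). A minimal YQL sketch of the pattern the InsertConflictingKey test name suggests, assuming two interactive sessions; the session labels and key values are illustrative and not taken from the test source:

    -- Session A: open an interactive transaction and write a key, do not commit yet
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "a");

    -- Session B: write the same key and commit first
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "b");
    COMMIT;

    -- Session A: COMMIT now fails with status ABORTED and issue code 2001,
    -- "Transaction locks invalidated. Table: `/Root/KV`" -- the same
    -- KIKIMR_LOCKS_INVALIDATED error the compute actor reports below.
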
2026-01-10T12:21:34.069787Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7593707025624256356:4628], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxnm7776gnad3hkbnr8vaj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YTgzYjk2OGItNDBmODcyYTEtNmNjMDFkODQtODk2MmVlNGQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-10T12:21:34.069917Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7593707025624256356:4628], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxnm7776gnad3hkbnr8vaj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YTgzYjk2OGItNDBmODcyYTEtNmNjMDFkODQtODk2MmVlNGQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-10T12:21:34.070463Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YTgzYjk2OGItNDBmODcyYTEtNmNjMDFkODQtODk2MmVlNGQ=, ActorId: [4:7593707017034319865:3564], ActorState: ExecuteState, LegacyTraceId: 01kekxnm7776gnad3hkbnr8vaj, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 1600, MsgBus: 20179 2026-01-10T12:20:43.674826Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804928735620:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.674878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.765788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:44.084099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.084208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.136415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.151125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.156251Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804928735582:2081] 1768047643673385 != 1768047643673388 2026-01-10T12:20:44.169039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.264090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.264112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.264118Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.264180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.386192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.714380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.873068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:46.986699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817813638342:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.986722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817813638348:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.986797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.987124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706817813638373:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.987267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.990621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.002464Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706817813638372:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.125149Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706822108605721:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.361296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:47.537475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.537475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.537714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.537923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.537942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.538051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.538075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.538159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.538194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.538242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.538307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.538351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.538422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.538454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.538564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.538592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.538693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.539302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.539324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.539434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706822108605906:2342];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.539460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7593706822108605888:2339];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:47.539565 ... 
nternal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.620540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.621900Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.621974Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.621993Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.629484Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.629557Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.629581Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.631904Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.631976Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.631996Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.637862Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.637932Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.637954Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.641488Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.641566Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.641588Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.645459Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.645527Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.645548Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.653784Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.653852Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.653874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.661907Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.661978Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.662001Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.670412Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.670486Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.670508Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.679162Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.679232Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.679255Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.687635Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.687704Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.687726Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.696505Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.696569Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.696588Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.706437Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.706498Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.706515Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.714843Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.714917Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.714941Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.727625Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.727739Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.727768Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.530325Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:33.530355Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 28931, MsgBus: 28110 2026-01-10T12:20:43.486042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706806264483020:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.486098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.938973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.939097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.024433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.027945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.078885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.307574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.307602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.307632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.307754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.318214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.511879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.912966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.922244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:46.935911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706819149385755:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.936007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706819149385749:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.936363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.936646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706819149385765:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.936744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:46.940211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:46.950476Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706819149385763:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.034829Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706823444353112:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.317899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:47.449531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.449531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.449709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.449868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.449948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.449999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.450058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.450125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.450196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.450281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.450304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.450399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.450403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.450477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.450489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:47.450592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.450608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:47.450687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.450706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706823444353281:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:47.450798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.450906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.451010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7593706823444353280:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Resto ... nternal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.395221Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.395693Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.395749Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.395766Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.403441Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.403508Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.403527Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.403959Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.404037Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.404065Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412277Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412345Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412387Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412763Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.412781Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.420936Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.421020Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.421043Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.423511Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.423587Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.423605Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.430087Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.430157Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.430179Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.433996Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.434059Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.434076Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.438544Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.438611Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.438631Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038029;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.441622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.441700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.441724Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.447335Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.447406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.447430Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.450232Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.450319Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.450344Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.458018Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.458129Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.458158Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.463034Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.463088Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:31.463106Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.704285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:33.704308Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 20943, MsgBus: 24611 2026-01-10T12:20:44.494119Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706809854048641:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.497035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.848343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.938028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706809854048613:2081] 1768047644491647 != 1768047644491650 2026-01-10T12:20:44.947355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.957963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.958061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.969864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:45.120417Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.198734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.198779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.198785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.198864Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.524065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.665085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.674422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:48.005313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706827033918689:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.005393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706827033918664:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.005526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.006254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706827033918702:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.006330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.009621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.022051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706827033918701:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:48.163417Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706827033918754:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.461751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:48.651166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.651438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.651725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.651831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.651950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.652095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:48.652121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.652153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:48.652250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.652267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:48.652359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.652370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:48.652475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:48.652476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:48.652584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:48.652600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:48.652663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:48.652689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:48.652763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:48.652784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7593706827033918922:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:48.652970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7593706827033918903:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:48.653159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_ ... 
nd to: [3:7593706990135343064:2964] from: [3:7593706990135343063:2964] trace_id# 2026-01-10T12:21:26.060041Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593706990135343064:2964] TxId: 281474976710665. Ctx: { TraceId: 01kekxncac7amnwh4ndkgvz7ek, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2NlMTFlMTgtNzdiMDhjMTQtODRhM2QxNzItNzVlZDI2MTY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:26.060411Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2NlMTFlMTgtNzdiMDhjMTQtODRhM2QxNzItNzVlZDI2MTY=, ActorId: [3:7593706985840375723:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxncac7amnwh4ndkgvz7ek, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:21:26.061672Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710665; 2026-01-10T12:21:26.061841Z node 3 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1768047686104 : 281474976710665] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 19870, MsgBus: 7796 2026-01-10T12:21:27.628587Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593706994862266545:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:27.628650Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:27.672253Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:27.775916Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.776036Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.777284Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.778888Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593706994862266517:2081] 1768047687627557 != 1768047687627560 2026-01-10T12:21:27.796808Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.876811Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:27.876833Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:27.876838Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:27.876919Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:27.889946Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:28.403956Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:28.415129Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:21:28.640647Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:31.614251Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707012042136595:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.614250Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707012042136588:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.614344Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.614631Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707012042136604:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.614699Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:31.618463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:31.628928Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593707012042136603:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:31.732169Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593707012042136658:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:31.787146Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:31.828629Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.757087Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593706994862266545:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:32.771396Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:33.044352Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.139965Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2026-01-10T12:21:35.141325Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7593707029222013991:2964], SessionActorId: [4:7593707024927046639:2964], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[4:7593707029222013991:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:21:35.141435Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7593707029222013991:2964], SessionActorId: [4:7593707024927046639:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7593707024927046639:2964]. 2026-01-10T12:21:35.141624Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=MjU5MDJmNzQtZTI1MDJhOTQtZGFhZGJmMjMtMTAxMDQ1MjY=, ActorId: [4:7593707024927046639:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxnn6j8nqyffyfne2q4mnb, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7593707029222013992:2964] from: [4:7593707029222013991:2964] trace_id# 2026-01-10T12:21:35.141740Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [4:7593707029222013992:2964] TxId: 281474976710666. Ctx: { TraceId: 01kekxnn6j8nqyffyfne2q4mnb, Database: /Root, SessionId: ydb://session/3?node_id=4&id=MjU5MDJmNzQtZTI1MDJhOTQtZGFhZGJmMjMtMTAxMDQ1MjY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:21:35.142079Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=MjU5MDJmNzQtZTI1MDJhOTQtZGFhZGJmMjMtMTAxMDQ1MjY=, ActorId: [4:7593707024927046639:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxnn6j8nqyffyfne2q4mnb, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:21:35.142640Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2026-01-10T12:21:35.142748Z node 4 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1768047695183 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_13_Query [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 12151, MsgBus: 32054 2026-01-10T12:20:44.561441Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706812335010359:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.562062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.980195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:45.010127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:45.011683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:45.093104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:45.099160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:45.132206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706812335010324:2081] 1768047644544288 != 1768047644544291 2026-01-10T12:20:45.262746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:20:45.292441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.292463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.292470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.292539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.571424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.760787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.767662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:48.023647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829514880409:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.023738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829514880397:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.024032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.024504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706829514880413:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.024580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:48.034079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:48.051771Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706829514880412:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:48.151800Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706829514880465:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.470479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.570884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.564395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706812335010359:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.564778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.837196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:59.968829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:20:59.968859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:05.461351Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7593706902529333354:2964], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxmr9fb6mpah8k9yd6dk2a. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MjgwZjk2YTYtZmQ4MGMyY2EtNGU0MjcxNzUtMzY1MTQ2NjU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1768047651552/18446744073709551615 shard 72075186224037889 with lowWatermark v1768047652007/18446744073709551615 (node# 1 state# Ready) } } 2026-01-10T12:21:05.461833Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593706902529333354:2964], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxmr9fb6mpah8k9yd6dk2a. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MjgwZjk2YTYtZmQ4MGMyY2EtNGU0MjcxNzUtMzY1MTQ2NjU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1768047651552/18446744073709551615 shard 72075186224037889 with lowWatermark v1768047652007/18446744073709551615 (node# 1 state# Ready) } }. 2026-01-10T12:21:05.462571Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MjgwZjk2YTYtZmQ4MGMyY2EtNGU0MjcxNzUtMzY1MTQ2NjU=, ActorId: [1:7593706842399790315:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmr9fb6mpah8k9yd6dk2a, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 43 has no snapshot at v1768047651552/18446744073709551615 shard 72075186224037889 with lowWatermark v1768047652007/18446744073709551615 (node# 1 state# Ready)" severity: 1 } } trace_id# Trying to start YDB, gRPC: 11148, MsgBus: 29175 2026-01-10T12:21:06.787183Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706907369465116:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:06.787246Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:06.833962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:06.949301Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:07.026225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:07.026320Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:07.037215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:07.062845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:07.223502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:07.223522Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:07.223528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:07.223614Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:07.629051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:07.80 ... D, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:24.456646Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706981094414015:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:24.456721Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:24.456963Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593706981094414024:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:24.457017Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:24.460996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:24.474780Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593706981094414023:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:21:24.580301Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593706981094414076:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:24.636433Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:24.738549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:25.717401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:26.402821Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593706963914543977:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:26.486695Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:27.356723Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZTA4ZjlhOTgtYmMwNDI5N2EtZDFjOTgzN2ItZDExNjBlYjI=, ActorId: [3:7593706989684356579:2962], ActorState: ExecuteState, LegacyTraceId: 01kekxndkm179fd5yh7bv46dme, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 32534, MsgBus: 28790 2026-01-10T12:21:28.746854Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707001866040503:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:28.746927Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:28.759540Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:28.836123Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:28.837508Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593707001866040475:2081] 1768047688745874 != 1768047688745877 2026-01-10T12:21:28.864066Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.864176Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.875555Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.914168Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:28.914191Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:28.914201Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:28.914280Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:29.042830Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:29.362314Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:29.753950Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:32.473548Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707019045910541:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.473548Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707019045910549:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.473666Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.474028Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707019045910556:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.474125Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:32.478309Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:32.491161Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593707019045910555:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:32.562140Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593707019045910608:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:32.626943Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.665517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.674687Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.939523Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707001866040503:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:34.034202Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:35.603279Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=Yjk3MzNjN2MtYTJmZDRlMzgtNzhiYjBlMGMtYzQ5YWI2OGM=, ActorId: [4:7593707027635853156:2962], ActorState: ExecuteState, LegacyTraceId: 01kekxnnnber8s1zwwcyh6k5cg, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] >> TxUsage::WriteToTopic_Demo_14_Table >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 19243, MsgBus: 61782 2026-01-10T12:20:44.296279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706810455543223:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.297043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.571404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.572797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.626885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.640839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.685656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.687278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706810455543092:2081] 1768047644272443 != 1768047644272446 2026-01-10T12:20:44.888670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.888702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.888710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.888786Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.894714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.308870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.509215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:45.516297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:47.545240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706823340445866:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.545278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706823340445874:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.545364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.545741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706823340445881:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.545792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.548690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.559729Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706823340445880:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.690651Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706823340445933:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.030950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.166736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.373518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706810455543223:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.378678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.430033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.843728Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706840520323179:2963], SessionActorId: [1:7593706840520323130:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7593706840520323130:2963]. 2026-01-10T12:20:51.843994Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NThhYjRjNDItYTYyNTAxNzQtNWNhNjI5MGItZjg2NmI4ZDQ=, ActorId: [1:7593706840520323130:2963], ActorState: ExecuteState, LegacyTraceId: 01kekxmawseskfsatcsk6xz2es, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7593706840520323180:2963] from: [1:7593706840520323179:2963] trace_id# 2026-01-10T12:20:51.844338Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706840520323180:2963] TxId: 281474976710665. Ctx: { TraceId: 01kekxmawseskfsatcsk6xz2es, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NThhYjRjNDItYTYyNTAxNzQtNWNhNjI5MGItZjg2NmI4ZDQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-10T12:20:51.844645Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NThhYjRjNDItYTYyNTAxNzQtNWNhNjI5MGItZjg2NmI4ZDQ=, ActorId: [1:7593706840520323130:2963], ActorState: ExecuteState, LegacyTraceId: 01kekxmawseskfsatcsk6xz2es, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 10248, MsgBus: 63429 2026-01-10T12:20:53.321696Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:53.322614Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706850204452671:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:53.322726Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:53.460206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.475584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.475670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.486869Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.493736Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.660133Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.660155Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.660162Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.660271Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.714846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:54.312440Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.326676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.337152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: ... 
nternal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.071338Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080087Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080143Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080159Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080179Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080194Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.080209Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087873Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087927Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087942Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087961Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087974Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.087991Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095655Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095655Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095702Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095712Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095723Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.095748Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103232Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103261Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103277Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103282Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.103292Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110875Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110899Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110939Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.110976Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118754Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118815Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118846Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118857Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118866Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.118870Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.127354Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.127412Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.127426Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.128006Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.128054Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.128103Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.136459Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.136520Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:33.136537Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:34.136675Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:34.136726Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_43_Table |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26093, MsgBus: 4866 2026-01-10T12:20:43.636275Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706807615994163:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.636430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.926196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.932646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:20:43.932736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.004181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.066071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.085567Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706807615994059:2081] 1768047643628239 != 1768047643628242 2026-01-10T12:20:44.294925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.322563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.322582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.322590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.322667Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.653889Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.917774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.308318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824795864140:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.308323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824795864108:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.308434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.308736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824795864147:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.308810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.312367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.324354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706824795864146:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:20:47.449857Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706824795864199:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.758675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:47.875884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.718411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706807615994163:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:48.719388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.089499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22885, MsgBus: 16673 2026-01-10T12:20:52.549482Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706844235085006:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:52.549830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:52.655575Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:52.835628Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:52.840373Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593706844235084871:2081] 1768047652522209 != 1768047652522212 2026-01-10T12:20:52.845098Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:52.845178Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:52.886039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:52.904167Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:53.136718Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.136741Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.136750Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.136825Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.576237Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:53.729840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:53.752250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:56.605249Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861414954935:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.605419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861414954953:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.605486Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.607541Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706861414954980:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.607630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:56.608844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_cr ... d=281474976715671; 2026-01-10T12:21:28.477598Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.477740Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.477739Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.477902Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.477961Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478034Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478174Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478185Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478322Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478416Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478474Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478625Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478635Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478772Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478854Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.478923Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479092Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479097Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479236Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479282Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479485Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-10T12:21:28.479672Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; Trying to 
start YDB, gRPC: 6249, MsgBus: 64883 2026-01-10T12:21:30.691353Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707009535505279:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:30.691434Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:30.708931Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:30.805174Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:30.806784Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593707009535505251:2081] 1768047690690338 != 1768047690690341 2026-01-10T12:21:30.826921Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:30.827024Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.856531Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.907836Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:30.907863Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:30.907874Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:30.907970Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:30.951660Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:21:31.457816Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:31.462633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:21:31.698056Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:34.807865Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707026715375307:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:34.807932Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707026715375326:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:34.807966Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:34.808240Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593707026715375330:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:34.808283Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:34.811963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:34.821072Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593707026715375331:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:21:34.903032Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593707026715375383:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:34.955224Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.993049Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.839323Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707009535505279:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:35.848427Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:21:36.156739Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:37.880694Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7593707039600285327:2967], SessionActorId: [4:7593707039600285260:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[4:7593707039600285260:2967]. 2026-01-10T12:21:37.880889Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=NTQ3OGIwMzgtNTc0NDQ2ZDAtNDk3NmZhYTktNzc3NzIxZGQ=, ActorId: [4:7593707039600285260:2967], ActorState: ExecuteState, LegacyTraceId: 01kekxnqyq45j0d3d8ht4jkc3v, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7593707039600285345:2967] from: [4:7593707039600285327:2967] trace_id# 2026-01-10T12:21:37.880975Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [4:7593707039600285345:2967] TxId: 281474976710668. Ctx: { TraceId: 01kekxnqyq45j0d3d8ht4jkc3v, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NTQ3OGIwMzgtNTc0NDQ2ZDAtNDk3NmZhYTktNzc3NzIxZGQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-10T12:21:37.881210Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NTQ3OGIwMzgtNTc0NDQ2ZDAtNDk3NmZhYTktNzc3NzIxZGQ=, ActorId: [4:7593707039600285260:2967], ActorState: ExecuteState, LegacyTraceId: 01kekxnqyq45j0d3d8ht4jkc3v, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> Describe::DescribePartitionPermissions [GOOD] >> DirectReadWithServer::KillPQTablet >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [GOOD] >> AnalyzeColumnshard::AnalyzeShard >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query >> AnalyzeColumnshard::Analyze >> AnalyzeColumnshard::AnalyzeDeadline |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4232, MsgBus: 17391 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:46.347762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:46.444827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:46.453281Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:46.453762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:46.453883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:46.777944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:46.778041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:46.783889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047643426913 != 1768047643426916 2026-01-10T12:20:46.795764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:46.843703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:46.961884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:47.343638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:47.343713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:47.343753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:47.344324Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:47.356847Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:47.713204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.827141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.049261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.482388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.792694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.787103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.787558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.788726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.788909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.822538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.043888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.365344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.722209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.018228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.398259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.729733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.109140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.529455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.529690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.530253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.530336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.530417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:52.540730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:52.718931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:20:52.769596Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.680511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table. ... 74614Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:30.874697Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:31.228243Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:31.228404Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:31.257011Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1768047687091209 != 1768047687091212 2026-01-10T12:21:31.263378Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:31.312677Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:31.414710Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:31.770810Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:31.770890Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:31.770936Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:31.771382Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:31.783702Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:32.229373Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:32.262387Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.502520Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.879263Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.169847Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.708814Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1905:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.709076Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.709750Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1979:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.709806Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:33.732826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.954335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.235468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.481707Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.715647Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.986228Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.248806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.542489Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.943013Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2790:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:35.943106Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:35.943563Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:35.943631Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2797:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:35.943682Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:35.950024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:36.117379Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2799:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:21:36.174890Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2859:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:38.102068Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:38.352186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:38.753044Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 6949, MsgBus: 31701 2026-01-10T12:20:44.323282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706811224403271:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.328033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.770058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.802145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.802253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.821668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.943141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.964213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706811224403229:2081] 1768047644293324 != 1768047644293327 2026-01-10T12:20:45.070768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.136336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.136362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:45.136370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.136455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.323591Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.659690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.700745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824109305998:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.700843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.700970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824109306015:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.701130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706824109306019:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.701202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.704641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.714547Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706824109306021:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.807839Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706824109306072:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.134929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.245910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.437800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706811224403271:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.438048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.497939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.468198Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593706841289183173:2964], SessionActorId: [1:7593706836994215837:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7593706836994215837:2964]. 2026-01-10T12:20:51.468459Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OGU0YmY0MjItNjFjMTcyODUtZmUxZThmNzctYzk1NThjMWY=, ActorId: [1:7593706836994215837:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmakq3qe7gc9p78fnfgzt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7593706841289183174:2964] from: [1:7593706841289183173:2964] trace_id# 2026-01-10T12:20:51.468585Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593706841289183174:2964] TxId: 281474976710665. Ctx: { TraceId: 01kekxmakq3qe7gc9p78fnfgzt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGU0YmY0MjItNjFjMTcyODUtZmUxZThmNzctYzk1NThjMWY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-10T12:20:51.468927Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGU0YmY0MjItNjFjMTcyODUtZmUxZThmNzctYzk1NThjMWY=, ActorId: [1:7593706836994215837:2964], ActorState: ExecuteState, LegacyTraceId: 01kekxmakq3qe7gc9p78fnfgzt, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 E0110 12:20:52.734615366 375564 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-10T12:20:52.734426595+00:00"} Trying to start YDB, gRPC: 13196, MsgBus: 64789 2026-01-10T12:20:52.972785Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593706845389678265:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:53.008505Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:53.161604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.210972Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.232809Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.232892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.268627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:53.392782Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:53.392810Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:53.392819Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:53.392895Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:53.424175Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:54.012278Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:54.169859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.177198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:56.902529Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593706862569548234:2321], DatabaseId: /Root, PoolId: default, Failed to ... 
h_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.918786Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.918804Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.919526Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.919587Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.919610Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.927814Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.927882Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.927903Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.929011Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.929067Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.929083Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.937526Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.937598Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.937633Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.938420Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.938474Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.938506Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947162Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947229Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947247Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947740Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947800Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.947816Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.956338Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.956391Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:39.956404Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:40.897084Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:80;broken_lock_id=281474976715665; 
2026-01-10T12:21:41.091197Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[4:7593707012323760468:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715669; 2026-01-10T12:21:41.093176Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7593707055273442737:3540].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-10T12:21:41.093286Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7593707050978474933:3540]. 2026-01-10T12:21:41.093473Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=YmEzNzI5MjctMzEwZGMyMjgtYmZjZTAyNzUtYzAwMWIyYTg=, ActorId: [4:7593707050978474933:3540], ActorState: ExecuteState, LegacyTraceId: 01kekxntx8aeje1vf50ev6e2dg, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7593707055273442757:3540] from: [4:7593707055273442737:3540] trace_id# 2026-01-10T12:21:41.093574Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [4:7593707055273442757:3540] TxId: 281474976715669. Ctx: { TraceId: 01kekxntx8aeje1vf50ev6e2dg, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YmEzNzI5MjctMzEwZGMyMjgtYmZjZTAyNzUtYzAwMWIyYTg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-10T12:21:41.093691Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], StateRollback: unknown message 278003713 2026-01-10T12:21:41.093908Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YmEzNzI5MjctMzEwZGMyMjgtYmZjZTAyNzUtYzAwMWIyYTg=, ActorId: [4:7593707050978474933:3540], ActorState: ExecuteState, LegacyTraceId: 01kekxntx8aeje1vf50ev6e2dg, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:21:41.094233Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], StateRollback: unknown message 278003713 2026-01-10T12:21:41.094253Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], StateRollback: unknown message 278003713 2026-01-10T12:21:41.094287Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7593707055273442737:3540], SessionActorId: [4:7593707050978474933:3540], StateRollback: unknown message 278003713 2026-01-10T12:21:41.094840Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.095137Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.095251Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.095631Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.095735Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.095872Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.096003Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.096583Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.096620Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; 2026-01-10T12:21:41.096789Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715669; |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:36.653801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:36.751710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:36.757842Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:36.758374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:36.758472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:37.071413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:37.159718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:37.159818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:37.194117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:37.279744Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:37.905343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:37.906516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:37.906575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:37.906606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:37.906729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:37.970101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:38.495297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:41.318222Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:41.323960Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:41.325995Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:41.350184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:41.350284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:41.377773Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:41.379508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:41.549034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:41.549129Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:41.550242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.550680Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.551108Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.551863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.552204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.552474Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.552593Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.552777Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.552869Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:41.567779Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:41.804334Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:41.850717Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:41.850852Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:41.883801Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:41.884976Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:41.885127Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:41.885172Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:41.885218Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:41.885267Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:41.885303Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:41.885339Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:41.885815Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:41.906243Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:41.906348Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:41.929363Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:41.930436Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:41.978698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:41.981189Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:41.996259Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:41.996327Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:41.996405Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:42.005989Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:42.025178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:42.045591Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:42.045693Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:42.054529Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:42.203403Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:42.236584Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:42.496853Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:42.634694Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:42.634767Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:43.317713Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:43.499449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:43.949828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3037:3421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:43.949997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.001959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3041:3424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.002095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.003016Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3044:3427]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:44.003191Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:44.003282Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3046:3429] 2026-01-10T12:21:44.003363Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3046:3429] 2026-01-10T12:21:44.003888Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3047:3178] 2026-01-10T12:21:44.004133Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3046:3429], server id = [2:3047:3178], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:44.004296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3047:3178], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:44.004346Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:44.004537Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:44.004588Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3044:3427], StatRequests.size() = 1 2026-01-10T12:21:44.004784Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:44.018934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3051:3433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.019079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.019495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3055:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.019575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.019636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3058:3440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:44.025253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:44.144541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:44.144602Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:44.230631Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3046:3429], schemeshard count = 1 2026-01-10T12:21:44.546115Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3060:3442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:21:44.755863Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3164:3509] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:44.770827Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3187:3525]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:44.771054Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:44.771109Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3187:3525], StatRequests.size() = 1 2026-01-10T12:21:44.919478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:45.232276Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3520:3587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:45.232459Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:45.232493Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:3520:3587], StatRequests.size() = 1 2026-01-10T12:21:45.255318Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3529:3596]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:45.255475Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:21:45.255499Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:3529:3596], StatRequests.size() = 1 2026-01-10T12:21:45.355934Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3579:3430]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:45.358647Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:45.358715Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:45.359145Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:45.359196Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:45.359244Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:45.366734Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-10T12:21:45.371947Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:21:45.372327Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-10T12:21:45.372486Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:45.372595Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:45.372814Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3607:3446]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:45.374966Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:45.375022Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:45.375256Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:45.375292Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:45.375330Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:45.375539Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:45.378509Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-10T12:21:45.378883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-10T12:21:45.379354Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-10T12:21:45.379479Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table >> TxUsage::WriteToTopic_Demo_14_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 31591, MsgBus: 15697 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:47.694115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:47.820817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:47.829183Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:47.829496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:47.829584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:48.206398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:48.206527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:48.214137Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047644709970 != 1768047644709973 2026-01-10T12:20:48.232945Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:48.284563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:48.430102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:48.971776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:48.971850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:48.971893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:48.972509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:48.985378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:49.475026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:49.584387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.584566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.584690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.586053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.586277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:49.591070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:49.709549Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:20:49.811747Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:50.245010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:50.588720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:53.340321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:56.623524Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=4; 2026-01-10T12:20:56.635507Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:20:56.647738Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-10T12:20:56.648105Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:8793:8421], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:8759:8421]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:8793:8421].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2026-01-10T12:20:56.648767Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:8784:8421], SessionActorId: [1:8759:8421], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[1:8759:8421]. 2026-01-10T12:20:56.649286Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZDQ3MDQ4OGMtYzIzN2IzM2MtNDNkZjAyNzQtNjdlOTE4OTA=, ActorId: [1:8759:8421], ActorState: ExecuteState, LegacyTraceId: 01kekxmffz8wcpfepjdxyeqewe, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:8785:8421] from: [1:8784:8421] trace_id# 2026-01-10T12:20:56.649551Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:8785:8421] TxId: 281474976715664. Ctx: { TraceId: 01kekxmffz8wcpfepjdxyeqewe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ3MDQ4OGMtYzIzN2IzM2MtNDNkZjAyNzQtNjdlOTE4OTA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-10T12:20:56.649949Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZDQ3MDQ4OGMtYzIzN2IzM2MtNDNkZjAyNzQtNjdlOTE4OTA=, ActorId: [1:8759:8421], ActorState: ExecuteState, LegacyTraceId: 01kekxmffz8wcpfepjdxyeqewe, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 63536, MsgBus: 10286 ... waiting for SysViewsRoster update finished 2026-01-10T12:21:01.273737Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:01.285393Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:01.285728Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:01.285794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:01.555069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:01.555230Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:01.586623Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047657947267 != 1768047657947271 2026-01-10T12:21:01.591563Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:01.645141Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:01.758805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErr ... node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.263997Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.269816Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.269878Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.269902Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.274829Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.274889Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.274913Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-10T12:21:44.280211Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.280268Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.280293Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.284996Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.285054Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.285078Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.290170Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.290230Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.290253Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.295106Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.295164Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.295190Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.299753Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.299809Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.299831Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.304986Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.305046Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.305070Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.309678Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.309736Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.309758Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.381003Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxnfvh6qpg6c0m7b7z1679", SessionId: ydb://session/3?node_id=3&id=MjhiYjVlNTUtN2U5YTgzMi01YWU5YjlhNi1mOTNjMTM1MA==, Slow query, duration: 14.791467s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-10T12:21:45.580069Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;commit_tx_id=281474976710667;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-10T12:21:45.670847Z node 3 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037919;self_id=[3:2043:3482];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037919;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976710668; 2026-01-10T12:21:45.671387Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [3:9273:8025], SessionActorId: [3:8706:8025], Got LOCKS BROKEN for table. ShardID=72075186224037919, Sink=[3:9273:8025].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-10T12:21:45.671570Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:9273:8025], SessionActorId: [3:8706:8025], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[3:8706:8025]. 2026-01-10T12:21:45.671873Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MjdjN2JiYTAtNGEzZjdjYzYtYTE0ODFiOWEtYzcyNTM1NDU=, ActorId: [3:8706:8025], ActorState: ExecuteState, LegacyTraceId: 01kekxnyv17nj6y8s6y2qpkttc, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:9839:8025] from: [3:9273:8025] trace_id# 2026-01-10T12:21:45.672639Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:9839:8025] TxId: 281474976710668. Ctx: { TraceId: 01kekxnyv17nj6y8s6y2qpkttc, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MjdjN2JiYTAtNGEzZjdjYzYtYTE0ODFiOWEtYzcyNTM1NDU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-10T12:21:45.673312Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MjdjN2JiYTAtNGEzZjdjYzYtYTE0ODFiOWEtYzcyNTM1NDU=, ActorId: [3:8706:8025], ActorState: ExecuteState, LegacyTraceId: 01kekxnyv17nj6y8s6y2qpkttc, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TxUsage::WriteToTopic_Demo_14_Query >> TResourcePoolTest::ParallelCreateSameResourcePool >> TResourcePoolTest::ReadOnlyMode >> TResourcePoolTest::CreateResourcePool >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] >> TResourcePoolTest::ParallelCreateResourcePool >> TxUsage::WriteToTopic_Demo_24_Table >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:39.319911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:39.400166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:39.405561Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:39.405914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:39.406021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:39.768055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:39.864682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:39.864800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:39.898035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:39.995724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:40.623955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:40.624502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:40.624549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:40.624582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:40.625120Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:40.690295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:41.226583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:43.669307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:43.674713Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:43.677153Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:43.697106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:43.697184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.724679Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:43.726181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.886682Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:43.886789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.888239Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.889042Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.889703Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.890686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.890801Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.891029Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.891156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.891327Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.891491Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:43.906095Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:44.118449Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:44.135877Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:44.135960Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:44.158968Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:44.160276Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:44.160496Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:44.160572Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:44.160632Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:44.160688Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:44.160736Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:44.160785Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:44.161337Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:44.161842Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1913:2455] 2026-01-10T12:21:44.165644Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:21:44.171642Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Describe result: PathErrorUnknown 2026-01-10T12:21:44.171696Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Creating table 2026-01-10T12:21:44.171755Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:44.183094Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2138:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:44.189399Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2120:2578] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-10T12:21:44.195850Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-10T12:21:44.198410Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:44.198489Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2148:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:44.224008Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2221:2651] 2026-01-10T12:21:44.224346Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2221:2651], schemeshard id = 72075186224037897 2026-01-10T12:21:44.336627Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:44.463035Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:21:44.623294Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:44.624813Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2564:2879], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:44.627191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:44.629770Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:44.629835Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2111:2575] Owner: [2:2110:2574]. Subscribe on create table tx: 2814749767 ... ror: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.222377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.636058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3740:3628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.636457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.636968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3744:3631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.637040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.638038Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3747:3634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:47.638217Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:47.638373Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:21:47.638417Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3750:3637] 2026-01-10T12:21:47.638478Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3750:3637] 2026-01-10T12:21:47.639035Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3751:3564] 2026-01-10T12:21:47.639188Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3750:3637], server id = [2:3751:3564], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:47.639415Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3751:3564], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:47.639465Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:47.639662Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:47.639723Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3747:3634], StatRequests.size() = 1 2026-01-10T12:21:47.654823Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:47.655199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3755:3641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.655404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.655916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3759:3645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.655991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.656045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3762:3648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:47.661336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:47.770338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:47.770446Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:47.791998Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3750:3637], schemeshard count = 1 2026-01-10T12:21:48.048113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3764:3650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:21:48.308351Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3873:3714] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:48.319978Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3896:3730]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:48.320156Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:48.320192Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3896:3730], StatRequests.size() = 1 2026-01-10T12:21:48.451273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:48.861318Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4231:3802]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:48.861513Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:48.861555Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4231:3802], StatRequests.size() = 1 2026-01-10T12:21:48.884206Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4240:3811]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:48.884370Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:21:48.884406Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4240:3811], StatRequests.size() = 1 2026-01-10T12:21:48.967765Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4281:3808]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:48.970432Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:48.970486Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:48.970947Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:48.970989Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:21:48.971038Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:48.978867Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-10T12:21:48.984170Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:21:48.984442Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-10T12:21:48.984636Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:48.984694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:48.984962Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4309:3824]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:48.987216Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:48.987261Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:48.987712Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:48.987755Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:21:48.987795Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:48.987999Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:48.990110Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-10T12:21:48.990749Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-10T12:21:48.990870Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-10T12:21:48.991066Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] >> TResourcePoolTest::CreateResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:50.041003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:50.041080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:50.041138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:50.041163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:50.041183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:50.041235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:50.041977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:50.042951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:50.116378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:50.116417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:50.127973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:50.128048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:50.130523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:50.139129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:50.139317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:50.143370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:50.145650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:50.155606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.156368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:50.164085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:50.164283Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:50.164428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:50.164709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:50.282854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 01-10T12:21:50.786491Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 190us result status StatusSuccess 2026-01-10T12:21:50.786801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:50.788505Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:50.788676Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 165us result status StatusSuccess 2026-01-10T12:21:50.788979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 
SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-10T12:21:50.789217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-10T12:21:50.789321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-10T12:21:50.789383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-10T12:21:50.789397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-10T12:21:50.789436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-10T12:21:50.789464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-10T12:21:50.789912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:21:50.789989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:21:50.790025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:765:2755] 2026-01-10T12:21:50.790187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:21:50.790218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-10T12:21:50.790231Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:765:2755] 2026-01-10T12:21:50.790281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:21:50.790347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:21:50.790363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:765:2755] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-10T12:21:50.790666Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:50.790806Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 147us result status StatusSuccess 2026-01-10T12:21:50.791050Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-10T12:21:50.794612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool 
CreateResourcePool { Name: "NilNoviSubLuna" } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:21:50.794820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 128:0, path# /MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna 2026-01-10T12:21:50.794986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:21:50.796705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-10T12:21:50.796903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:50.041003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:50.041099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:50.041170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:50.041198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:50.041219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2026-01-10T12:21:50.041263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:50.041960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:50.042978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:50.116027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:50.116090Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:50.128068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:50.128175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:50.130508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:50.138922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:50.139202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:50.143372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:50.145648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:50.155740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.156379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:50.164102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:50.164287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:50.164418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:50.164704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:50.288187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.289926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.290038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.290117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.290208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.290286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.199999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:21:51.200141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:21:51.200266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:51.200305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:51.201032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:51.201579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:51.202219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:21:51.203259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:51.203284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:21:51.203382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:51.203429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:51.203513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:51.203548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-10T12:21:51.203578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-10T12:21:51.203596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-10T12:21:51.203740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:21:51.203772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:21:51.203841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:21:51.203865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:51.203894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:21:51.203913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:51.203941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:21:51.203973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:51.203996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:21:51.204021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:21:51.204068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:21:51.204117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:21:51.204147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-10T12:21:51.204177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-10T12:21:51.204800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:51.204865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:51.204889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:21:51.204920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-10T12:21:51.204950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:21:51.205790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:51.205905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:51.205932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:21:51.205953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-10T12:21:51.205975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:51.206025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:21:51.207640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:51.208279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:21:51.208432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:21:51.208458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:21:51.208742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:21:51.208800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:21:51.208823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:759:2749] TestWaitNotification: OK eventTxId 102 2026-01-10T12:21:51.209185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:51.209349Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 165us result status StatusSuccess 2026-01-10T12:21:51.209625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:50.041009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:50.041092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:50.041158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:50.041190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:50.041218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:50.041268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.041326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:50.042082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:50.042960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:50.116030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:50.116098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:50.129012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:50.129086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:50.130529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:50.139058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:50.139290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:50.143320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:50.145656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:50.155354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.156355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:50.164029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.164165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:50.164197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:50.164334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:50.164554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:50.282845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.284988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.285348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2026-01-10T12:21:51.206597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-10T12:21:51.206684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:21:51.207326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.207415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.207450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:21:51.207483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-10T12:21:51.207525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:21:51.208325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.208422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.208452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:21:51.208478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 2 2026-01-10T12:21:51.208508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:21:51.208600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-10T12:21:51.210573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-10T12:21:51.210729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 
FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-10T12:21:51.211350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-10T12:21:51.212247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:51.212360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.212406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-10T12:21:51.212496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-10T12:21:51.212611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:21:51.212651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:21:51.213071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-01-10T12:21:51.214156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:51.214181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:51.214350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:21:51.214421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:51.214442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:908:2866], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-10T12:21:51.214480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:908:2866], at schemeshard: 72057594046678944, txId: 129, path id: 43 2026-01-10T12:21:51.214648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-10T12:21:51.214672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-10T12:21:51.214740Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:21:51.214762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:21:51.214787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-10T12:21:51.214816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:21:51.214846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-10T12:21:51.214887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-10T12:21:51.214911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-10T12:21:51.214938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-10T12:21:51.214989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:21:51.215016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-10T12:21:51.215038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2026-01-10T12:21:51.215057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 43], 3 2026-01-10T12:21:51.215476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.215524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.215544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:21:51.215573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-10T12:21:51.215611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:21:51.215990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.216101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-10T12:21:51.216133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-10T12:21:51.216153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-10T12:21:51.216177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:21:51.216220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-10T12:21:51.217925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-10T12:21:51.218859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 2088, MsgBus: 3432 2026-01-10T12:20:44.401607Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706812168368408:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:44.401635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:44.514375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:44.777913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:44.778016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.780134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.798440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:44.852231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706812168368384:2081] 1768047644399236 != 1768047644399239 2026-01-10T12:20:44.860924Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:45.040764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:45.040795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:20:45.040804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:45.040910Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:45.093365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:45.476372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:45.596945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:47.968303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825053271143:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.968459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.973457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825053271172:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.973555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706825053271173:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.974071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.983382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.996571Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706825053271177:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:48.103754Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706829348238524:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:48.403608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:48.544271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:49.572456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706812168368408:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.575795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:20:49.849193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 16998, MsgBus: 4449 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:56.444369Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:56.454728Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:56.455077Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:56.455147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:56.772398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:56.772544Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:56.809271Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047653087532 != 1768047653087536 2026-01-10T12:20:56.814444Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:56.862948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:56.999231Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:57.396344Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:57.396414Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:57.396453Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:57.396975Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:57.414828Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:57.856771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:57.946438Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:915:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.946560Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.947048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.947937Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.948040Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... 10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.850951Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.856835Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.856929Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.856967Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.863775Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.863863Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.863900Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.870885Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.870974Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.871013Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.877913Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.878004Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.878044Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.884532Z node 3 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.884635Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.884676Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.891000Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.891070Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.891099Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.897781Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.897849Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.897878Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.904550Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.904635Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.904677Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.911302Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.911394Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.911431Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.918384Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.918474Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.918512Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.924836Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.924902Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.924930Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.930994Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.931059Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.931087Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.937540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.937622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:43.937652Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:44.017381Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxnengfqhk6xxeqpwecs8z", SessionId: ydb://session/3?node_id=3&id=YmQ4YzExNWQtYmJiNmY1YmYtZWIzMzJmMTAtZDE5NTU4MDE=, Slow query, duration: 15.644476s, status: 
STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] >> DirectReadWithServer::KillPQTablet [GOOD] >> DirectReadWithServer::KillPQRBTablet [GOOD] >> LocalPartition::Restarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:50.756402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:50.756468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.756499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:50.756547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:50.756581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:50.756605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:50.756655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:50.756711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:50.757396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-10T12:21:50.757604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:50.820568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:50.820617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:50.832419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:50.832498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:50.832656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:50.839192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:50.839389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:50.839976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:50.840208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:50.841973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.842141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:50.843145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:50.843191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:50.843241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:50.843286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:50.843393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:50.843711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:50.949227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.949956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950455Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:50.950913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.907595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:51.907788Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 211us result status StatusSuccess 2026-01-10T12:21:51.908166Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 
MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.908928Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:51.909118Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools" took 184us result status StatusSuccess 2026-01-10T12:21:51.909562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools" PathDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.951739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:51.952003Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" took 284us result status StatusSuccess 2026-01-10T12:21:51.952376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" PathDescription { Self { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:51.952874Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:51.953047Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 181us result status StatusSuccess 2026-01-10T12:21:51.953319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] >> TResourcePoolTest::AlterResourcePool >> TResourcePoolTest::ParallelAlterResourcePool >> TResourcePoolTest::SchemeErrors >> TResourcePoolTest::DropResourcePoolTwice >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] >> AnalyzeColumnshard::AnalyzeShard [GOOD] >> TxUsage::WriteToTopic_Demo_43_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 23499, MsgBus: 16411 2026-01-10T12:20:43.591775Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706804740594467:2190];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:43.592249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:43.928168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:43.986609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:43.986705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:44.025867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:44.160786Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706804740594305:2081] 1768047643556515 != 1768047643556518 2026-01-10T12:20:44.168090Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:44.201678Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:44.372469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:44.372494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:44.372501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:44.372601Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:44.596846Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:44.923272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:44.940201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:47.145524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821920464393:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.145637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821920464385:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.145767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.146605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706821920464400:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.146660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:47.149872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:47.161354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706821920464399:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:20:47.281199Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706821920464452:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:47.587250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:47.790769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.791002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.791259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.791392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.791476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.791631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.791728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.791834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.791943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:47.792047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:47.792811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:47.792949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:47.793060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7593706821920464741:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:47.793826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:47.793874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:47.796050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:47.796271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:47.796398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:47.796513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:47.796592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:47.796707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7593706821920464625:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:47.796871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... 
6224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.108039Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.108042Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.108098Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116417Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116418Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116454Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116465Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116467Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.116479Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124694Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124694Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124731Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124742Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124746Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.124754Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133178Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133178Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133228Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133228Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133243Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.133243Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141350Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141413Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141428Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141469Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141504Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.141537Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149849Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149867Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149912Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149913Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.149949Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158395Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158453Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158467Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158868Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158909Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.158921Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:21:48.624565Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-10T12:21:48.713580Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7593707086943616878:3623], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxp2eq4y28970f43ptx8n1. CustomerSuppliedId : . 
RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjBmODI1ODQtOTc5NzVmNTItZDE5ZTdjYmQtZmFkNTRhMDY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-10T12:21:48.713647Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7593707086943616878:3623], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxp2eq4y28970f43ptx8n1. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjBmODI1ODQtOTc5NzVmNTItZDE5ZTdjYmQtZmFkNTRhMDY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-10T12:21:48.713979Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=MjBmODI1ODQtOTc5NzVmNTItZDE5ZTdjYmQtZmFkNTRhMDY=, ActorId: [4:7593707086943616620:3539], ActorState: ExecuteState, LegacyTraceId: 01kekxp2eq4y28970f43ptx8n1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TResourcePoolTest::AlterResourcePool [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:52.993939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:52.994069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:52.994100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:52.994124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:52.994156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:52.994178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:52.994220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:52.994270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:52.994902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:52.995099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:53.063340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:53.063397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.075643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:53.075726Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:53.075892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:53.082647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:53.082853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:53.083337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:53.083515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:53.085324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.085444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:53.086255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.086314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.086354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:53.086382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:53.086460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:53.086643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.214545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.215913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-10T12:21:53.676893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-10T12:21:53.676980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:21:53.677082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:53.677135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:53.677763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:53.678252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:53.678810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:21:53.679790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.679816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:21:53.679925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:53.679978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:53.680058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.680103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-10T12:21:53.680130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-10T12:21:53.680145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-10T12:21:53.680294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.680323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:21:53.680391Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:21:53.680412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:53.680444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:21:53.680484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:53.680534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:21:53.680566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:21:53.680600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:21:53.680621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:21:53.680682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:21:53.680712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:21:53.680734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-10T12:21:53.680758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-10T12:21:53.681406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:53.681461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:53.681489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:21:53.681516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-10T12:21:53.681541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:21:53.682672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:53.682745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:21:53.682778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:21:53.682796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-10T12:21:53.682815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:53.682861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:21:53.684427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:21:53.685011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:21:53.685139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:21:53.685164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:21:53.685441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:21:53.685494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:21:53.685517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:759:2749] TestWaitNotification: OK eventTxId 102 2026-01-10T12:21:53.685918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:53.686074Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 173us result status StatusSuccess 2026-01-10T12:21:53.686379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { Properties { key: "concurrent_query_limit" value: "10" } Properties { key: "query_cancel_after_seconds" value: "60" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy >> TResourcePoolTest::SchemeErrors [GOOD] >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:43.684989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:43.756855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:43.762005Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:43.762276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:43.762329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:44.044410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:44.121904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:44.122019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:44.155612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:44.219069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:44.840354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:44.841034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:44.841068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:44.841090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:44.841542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:44.903535Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:45.422567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:48.225724Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:48.231078Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:48.233333Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:48.260835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:48.260964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.299348Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:48.301928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:48.443686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.444223Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.444694Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445253Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445336Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445528Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445643Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445724Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.445853Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.596847Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:48.630675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:48.630762Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.643519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:48.800673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:48.830696Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:48.830772Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:48.850284Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:48.851327Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:48.851511Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:48.851552Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:48.851586Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:48.851622Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:48.851671Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:48.851708Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:48.852191Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:48.960879Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:48.960981Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:48.975190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:21:48.975444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:49.132333Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:21:49.132691Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:49.134789Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:49.134845Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:21:49.134934Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:49.137369Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:49.139848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:49.145329Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:49.145415Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:49.154711Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:49.319759Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:49.526314Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:49.644432Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:49.644539Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:21:50.263811Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:50.488319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2775:3378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:50.488550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:50.489149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:3383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:50.489255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:50.509983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:21:50.613096Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:21:50.613272Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:21:50.613454Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:21:50.613529Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:21:50.613605Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:21:50.613681Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:21:50.613746Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:21:50.613811Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:21:50.613902Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:21:50.613981Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:21:50.614069Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:21:50.614168Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:21:50.614249Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:21:50.641644Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:21:50.641773Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:21:50.641987Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:21:50.642040Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:21:50.642253Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:21:50.642305Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:21:50.642435Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:21:50.642491Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:21:50.642567Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:21:50.642609Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:21:50.642663Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:21:50.642702Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:21:50.642926Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:21:50.643005Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:21:50.643143Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:21:50.643187Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:21:50.643246Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:21:50.643339Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:21:50.643416Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:21:50.643457Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:21:50.643638Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:21:50.643691Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-10T12:21:50.643800Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-10T12:21:50.643839Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-10T12:21:50.701623Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2847:3027];ev=NActors::IEventHandle;tablet_id=72075186224037899;tx_id=281474976710659;this=137042708798720;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=2969;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=121:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:21:50.733450Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:21:50.733541Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:21:50.733575Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000018s |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] >> TResourcePoolTest::DropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:53.630416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:53.630499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.630528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:53.630554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:53.630587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:53.630611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:53.630658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.630716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:53.631366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:53.631563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:53.688741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:53.688781Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.699004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:53.699076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:53.699233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2026-01-10T12:21:53.704835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:53.705021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:53.705516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:53.705685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:53.707280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.707393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:53.708208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.708251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.708290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:53.708320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:53.708394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:53.708622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.828142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.829934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.830032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.830128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.830207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
TTxNotificationSubscriber for txId 114: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.413472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-10T12:21:54.413556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-10T12:21:54.413578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.413590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.413678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-10T12:21:54.413747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.413764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.413792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-10T12:21:54.413869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-10T12:21:54.413893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.413906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.413966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at 
schemeshard: 72057594046678944 2026-01-10T12:21:54.414378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.414913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-01-10T12:21:54.414982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.414997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.415073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.415088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.415170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.415185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 129: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.415244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.415255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.415329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.415353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:845:2835] 2026-01-10T12:21:54.415430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.415460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:845:2835] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-01-10T12:21:54.418315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:54.418464Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 166us result status StatusSuccess 2026-01-10T12:21:54.418772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 Properties { Properties { key: "concurrent_query_limit" value: "20" } Properties { key: "query_cancel_after_seconds" value: "60" } Properties { key: "query_count_limit" value: "50" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:53.814726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:53.814809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.814844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:53.814871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:53.814899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:53.814921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:53.814970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.815029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:53.815717Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:53.815937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:53.879792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:53.879845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.890294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:53.890384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:53.890533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:53.897031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:53.897229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:53.897774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:53.897969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:53.899597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.899722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:53.900592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.900635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.900678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:53.900710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:53.900783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:53.900989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.996961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.997958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.998362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:21:54.461224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-10T12:21:54.461333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-10T12:21:54.461356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 124 2026-01-10T12:21:54.461378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 124, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:21:54.461396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:54.461459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 124, subscribers: 0 2026-01-10T12:21:54.463201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-10T12:21:54.463449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-10T12:21:54.464269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-10T12:21:54.464331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 TestModificationResult got TxId: 124, wait until txId: 124 TestWaitNotification wait txId: 124 2026-01-10T12:21:54.464502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 124: send EvNotifyTxCompletion 2026-01-10T12:21:54.464535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 124 2026-01-10T12:21:54.464823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-10T12:21:54.464893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.464920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:728:2718] TestWaitNotification: OK eventTxId 124 TestModificationResults wait txId: 125 2026-01-10T12:21:54.467057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: 
"AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:21:54.467301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/AnotherDir, operationId: 125:0, at schemeshard: 72057594046678944 2026-01-10T12:21:54.467437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:21:54.467486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 0 2026-01-10T12:21:54.467515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 125:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 41] source path: 2026-01-10T12:21:54.467567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:21:54.467623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 125:1, path# /MyRoot/AnotherDir/MyResourcePool 2026-01-10T12:21:54.467698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-10T12:21:54.469370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 125:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 2026-01-10T12:21:54.469527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 125:0, at schemeshard: 72057594046678944 2026-01-10T12:21:54.471295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 125, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:54.471448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2026-01-10T12:21:54.473511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:21:54.473773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/.metadata/workload_manager/pools/AnotherDir, operationId: 126:0, at schemeshard: 72057594046678944 2026-01-10T12:21:54.473897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 40], parent name: pools, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-10T12:21:54.473952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 0 2026-01-10T12:21:54.473987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 126:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 42] source path: 2026-01-10T12:21:54.474064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:21:54.474105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 126:1, path# /MyRoot/.metadata/workload_manager/pools/AnotherDir/MyResourcePool 2026-01-10T12:21:54.474188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-10T12:21:54.475774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 126:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 2026-01-10T12:21:54.475886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 126:0, at schemeshard: 72057594046678944 2026-01-10T12:21:54.477519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:54.477693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-10T12:21:54.479687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "" } } TxId: 127 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:21:54.479813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 127:0, path# /MyRoot/.metadata/workload_manager/pools/ 2026-01-10T12:21:54.479931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-10T12:21:54.481609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/\', error: path part shouldn\'t be empty" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:54.481823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, operation: CREATE RESOURCE POOL, path: TestModificationResult got TxId: 127, wait until txId: 127 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:53.425970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:53.426066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.426117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:53.426144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:53.426170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:53.426192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:53.426244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.426290Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:53.426952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:53.427181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:53.487476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:53.487516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.499907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:53.500011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:53.500242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:53.506654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:53.506844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:53.507322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:53.507484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:53.509046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.509162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:53.509974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.510015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.510057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:53.510086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:53.510158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:53.510343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.616807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 
281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.617946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.618005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.618062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.618102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
is done id#101:0 progress is 3/3 2026-01-10T12:21:54.828888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:21:54.828906Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-10T12:21:54.828927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:21:54.828966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-10T12:21:54.828987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-10T12:21:54.829017Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:21:54.829052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:21:54.829096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:21:54.829119Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-10T12:21:54.829132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-10T12:21:54.829150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:21:54.829163Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-10T12:21:54.829174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-10T12:21:54.829188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:21:54.829204Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-10T12:21:54.829230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:21:54.829250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-10T12:21:54.829263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 5 2026-01-10T12:21:54.829275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-10T12:21:54.830021Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.830073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.830093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:21:54.830128Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:21:54.830160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:21:54.830882Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.830929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.830945Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:21:54.830964Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-10T12:21:54.830982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:21:54.831669Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.831720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.831736Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:21:54.831754Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-10T12:21:54.831771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:21:54.832460Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.832505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:21:54.832522Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:21:54.832538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:21:54.832556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:54.832612Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:21:54.833930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:21:54.833975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:21:54.834782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:21:54.834836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:21:54.834976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:21:54.835021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:21:54.835252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:21:54.835311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.835341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:726:2715] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:21:54.837409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpAlterResourcePool CreateResourcePool { Name: "MyResourcePool" Properties { Properties { key: "concurrent_query_limit" value: "20" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:21:54.837584Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_resource_pool.cpp:123: [72057594046678944] TAlterResourcePool Propose: opId# 102:0, path# /MyRoot/.metadata/workload_manager/pools/MyResourcePool 2026-01-10T12:21:54.837725Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), at schemeshard: 72057594046678944 2026-01-10T12:21:54.839082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:21:54.839255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), operation: ALTER RESOURCE POOL, path: MyResourcePool TestModificationResult got TxId: 102, wait until txId: 102 >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:53.788767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:53.788838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.788868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:53.788898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:53.788933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:53.788959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:53.789004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:53.789050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:53.789674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:53.789885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:53.847014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:53.847055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.856780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:53.856845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:53.857013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:53.863413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:53.863599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:53.864135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:53.864311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:53.865827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.865939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:53.866714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:53.866752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:53.866795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:53.866821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:53.866894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:53.867076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:53.969341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.970944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:53.971436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:54.948149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:21:54.948253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:21:54.948307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:54.948406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:54.948433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:21:54.948477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-10T12:21:54.948496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-01-10T12:21:54.948624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:21:54.948647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:21:54.948734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:21:54.948755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:54.948781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:21:54.948812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:54.948842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:21:54.948876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:54.948905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:21:54.948927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:21:54.948970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:54.948994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-10T12:21:54.949027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-10T12:21:54.949048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-10T12:21:54.949062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-10T12:21:54.949502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.949557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.949577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:54.949605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-10T12:21:54.949637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:21:54.949863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:21:54.949890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:21:54.949933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:21:54.950137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.950192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.950209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:54.950229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-10T12:21:54.950249Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:21:54.950878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.950978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:54.951011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:54.951032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-10T12:21:54.951053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:54.951100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:21:54.953458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:21:54.953682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:21:54.954047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:21:54.954312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:21:54.954557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:21:54.954596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:21:54.954866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:21:54.954935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:21:54.954966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:783:2773] TestWaitNotification: OK eventTxId 103 2026-01-10T12:21:54.955324Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:54.955493Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 170us result status StatusPathDoesNotExist 2026-01-10T12:21:54.955636Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:42.636317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:42.706253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:42.711549Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:42.711842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:42.711897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:43.005493Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:43.095444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:43.095585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.129569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.205507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:43.794638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:43.795040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:43.795074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:43.795100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:43.795389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:43.859523Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:44.395856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:46.974173Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.980361Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:46.983741Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:47.008522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:47.008642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.036309Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:47.038423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.224511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:47.224597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.225715Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.226228Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.226711Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227252Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227481Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227582Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227715Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227847Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.227935Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.242100Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.440438Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.466852Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:47.466979Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:47.509436Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:47.511006Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:47.511235Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:47.511298Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:47.511355Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:47.511423Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:47.511487Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:47.511538Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:47.512168Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:47.524875Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.525000Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.549191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:21:47.550596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:47.607022Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2230:2653] 2026-01-10T12:21:47.609970Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:21:47.624200Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Describe result: PathErrorUnknown 2026-01-10T12:21:47.624257Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Creating table 2026-01-10T12:21:47.624325Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:47.632050Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:47.635651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.642257Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:47.642375Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:47.653078Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:47.807930Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:47.878664Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:21:48.149496Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:48.278414Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:48.278497Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2244:2662] Owner: [2:2240:2661]. Column diff is empty, finishing 2026-01-10T12:21:48.959840Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.856166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.856834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4417:3831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.856892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.857992Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:4420:3834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:52.858135Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:52.858329Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:21:52.858381Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:4423:3837] 2026-01-10T12:21:52.858448Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:4423:3837] 2026-01-10T12:21:52.859021Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4424:3931] 2026-01-10T12:21:52.859225Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:4423:3837], server id = [2:4424:3931], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:52.859413Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4424:3931], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:52.859472Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:52.859670Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:52.859728Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:4420:3834], StatRequests.size() = 1 2026-01-10T12:21:52.875165Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:52.875767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4428:3841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.875835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.876347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4432:3845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.876444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.876551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4435:3848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:52.882185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:53.051422Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:53.051496Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:53.095446Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:4423:3837], schemeshard count = 1 2026-01-10T12:21:53.336576Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4437:3850], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-10T12:21:53.468141Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:4551:3923] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:53.479509Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:4574:3939]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:53.479656Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:53.479697Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:4574:3939], StatRequests.size() = 1 2026-01-10T12:21:53.613024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:53.948734Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4917:4003]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:53.948889Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:53.949230Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2026-01-10T12:21:53.949272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-10T12:21:53.949487Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-10T12:21:53.949533Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4917:4003], StatRequests.size() = 1 2026-01-10T12:21:53.960651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:53.971774Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4926:4012]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:53.971934Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:21:53.971970Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4926:4012], StatRequests.size() = 1 2026-01-10T12:21:54.043570Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4969:4180]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:54.045815Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:54.045866Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:54.046258Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 
] 2026-01-10T12:21:54.046293Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:21:54.046329Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:54.065618Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:54.070482Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:21:54.070738Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-10T12:21:54.070919Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:54.070972Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-10T12:21:54.071218Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4997:4196]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:21:54.074010Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:54.074078Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:54.074365Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:54.074403Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:21:54.074441Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:54.074616Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:21:54.076626Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-10T12:21:54.077046Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-10T12:21:54.077206Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-10T12:21:54.077360Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TResourcePoolTest::DropResourcePool [GOOD] >> TraverseColumnShard::TraverseColumnTable [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:55.315230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:55.315314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:55.315352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:55.315409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:55.315453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:55.315483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:55.315559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:55.315624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:55.316504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:55.316792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:55.375865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:55.375914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:55.385747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:55.385822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:55.385975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:55.392808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:55.393053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:55.393575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:55.393780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:55.395429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:55.395552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:55.396413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:55.396457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:55.396501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:55.396527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:55.396600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:55.396796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:55.518754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.519732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.519850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.519928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:21:55.520101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:55.520793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:55.960848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:21:55.960935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:21:55.960971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:21:55.961069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:55.961092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-10T12:21:55.961116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-10T12:21:55.961141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-01-10T12:21:55.961283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:21:55.961309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:21:55.961371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:21:55.961395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:55.961420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:21:55.961440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:55.961464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:21:55.961489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:21:55.961521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:21:55.961545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:21:55.961596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:21:55.961629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-10T12:21:55.961655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-10T12:21:55.961676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-10T12:21:55.961715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-10T12:21:55.962078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.962137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.962166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:55.962203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-10T12:21:55.962233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:21:55.962508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:21:55.962537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 
72057594046678944 2026-01-10T12:21:55.962579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:21:55.962839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.962880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.962901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:55.962927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-10T12:21:55.962945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:21:55.963699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.963766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:21:55.963798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:21:55.963820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-10T12:21:55.963852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:21:55.963914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:21:55.966028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:21:55.966202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:21:55.966758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:21:55.967041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:21:55.967219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:21:55.967244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:21:55.967518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:21:55.967579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:21:55.967601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:790:2780] TestWaitNotification: OK eventTxId 103 2026-01-10T12:21:55.967917Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:21:55.968091Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 171us result status StatusPathDoesNotExist 2026-01-10T12:21:55.968266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |92.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:21.432569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:21.547373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:21.554805Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:21.555199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:21.555311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:21.942682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:22.044296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:22.044433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:22.078503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:22.155064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:22.822590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:22.823695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:22.823752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:22.823790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:22.824515Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:22.889811Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:23.466973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:26.784001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:26.791680Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:26.795116Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:26.826440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:26.826567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:26.865354Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:26.868287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.085002Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.085737Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.086378Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.087142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.087271Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.087548Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.087692Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.087796Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.088000Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.214802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:27.294629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.294775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.309792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.485701Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.532322Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:27.532428Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:27.565947Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:27.567607Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:27.567806Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:27.567864Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:27.567916Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:27.567970Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:27.568030Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:27.568109Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:27.568792Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:27.694421Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.694519Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.716593Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:21:27.716942Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:27.881582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:21:27.881980Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:27.884288Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:27.884346Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:21:27.884426Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:27.888035Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:27.891369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:27.898750Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:27.898857Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:27.911166Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:28.133008Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:28.341039Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:28.462692Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:28.462786Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:21:29.085944Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 170Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5488:4721] 2026-01-10T12:21:54.105268Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5487:4720], server id = [2:5488:4721], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:54.105381Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5488:4721], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:54.105440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:21:54.105608Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:21:54.105683Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5485:4718], StatRequests.size() = 1 2026-01-10T12:21:54.105766Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:54.223918Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5475:4708], ActorId: [2:5476:4709], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWJhNmI1YTMtOTU2N2FhNmMtODkxOGM0ZDctNTI4ZTU1ODA=, TxId: 2026-01-10T12:21:54.224011Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5475:4708], ActorId: [2:5476:4709], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWJhNmI1YTMtOTU2N2FhNmMtODkxOGM0ZDctNTI4ZTU1ODA=, TxId: 2026-01-10T12:21:54.224435Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5474:4707], ActorId: [2:5475:4708], Got response [2:5476:4709] SUCCESS 2026-01-10T12:21:54.224716Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:54.238035Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-10T12:21:54.238098Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:21:54.417532Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:54.417619Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:54.439269Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5487:4720], schemeshard count = 1 2026-01-10T12:21:55.109020Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:21:55.109096Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-10T12:21:55.109126Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:21:55.112838Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:21:55.140725Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:21:55.141277Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:21:55.141345Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:21:55.142331Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:21:55.178085Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:55.178282Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:21:55.179121Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5569:4765], server id = [2:5573:4769], tablet id = 72075186224037899, status = OK 2026-01-10T12:21:55.179402Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5569:4765], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.179628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5570:4766], server id = [2:5574:4770], tablet id = 72075186224037900, status = OK 2026-01-10T12:21:55.179671Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5570:4766], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.180331Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5571:4767], server id = [2:5575:4771], tablet id = 72075186224037901, status = OK 2026-01-10T12:21:55.180373Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5571:4767], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.180896Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5572:4768], server id = [2:5576:4772], tablet id = 72075186224037902, status = OK 2026-01-10T12:21:55.180942Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5572:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.185546Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-01-10T12:21:55.186054Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5569:4765], server id = [2:5573:4769], tablet id = 72075186224037899 2026-01-10T12:21:55.186092Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.186483Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:21:55.187137Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5570:4766], server id = [2:5574:4770], tablet id = 72075186224037900 2026-01-10T12:21:55.187172Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.188042Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:21:55.188782Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5571:4767], server id = [2:5575:4771], tablet id = 72075186224037901 2026-01-10T12:21:55.188816Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.188977Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:21:55.189010Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:55.189191Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:21:55.189331Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:55.189728Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5589:4781], ActorId: [2:5590:4782], Starting query actor #1 [2:5591:4783] 2026-01-10T12:21:55.189786Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5590:4782], ActorId: [2:5591:4783], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:21:55.191239Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5572:4768], server id = [2:5576:4772], tablet id = 72075186224037902 2026-01-10T12:21:55.191263Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.191788Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5590:4782], ActorId: [2:5591:4783], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzZiYTljMTMtOTU5YTYyYWUtYzNmNDhlZjgtZTBmNWZkNjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:21:55.266415Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5600:4792]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.266590Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:55.266639Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5600:4792], StatRequests.size() = 1 2026-01-10T12:21:55.380585Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5590:4782], ActorId: [2:5591:4783], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzZiYTljMTMtOTU5YTYyYWUtYzNmNDhlZjgtZTBmNWZkNjI=, TxId: 2026-01-10T12:21:55.380649Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5590:4782], ActorId: [2:5591:4783], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzZiYTljMTMtOTU5YTYyYWUtYzNmNDhlZjgtZTBmNWZkNjI=, TxId: 2026-01-10T12:21:55.380923Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5589:4781], ActorId: [2:5590:4782], Got response [2:5591:4783] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:21:55.381297Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5613:4798]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.381477Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:55.381895Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.381934Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:55.382663Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.382713Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:55.382752Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:55.386070Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:18.370182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:18.484072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:18.492892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:18.493145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:18.493300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:18.986079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:19.090951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:19.091082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:19.151270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:19.262906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:20.021713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:20.022918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:20.022981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:20.023014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:20.023384Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:20.083444Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:20.677811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:23.554855Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:23.560703Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:23.563499Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:23.605556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:23.605677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:23.645066Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:23.647481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:23.846313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:23.846423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:23.848091Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.848758Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.849465Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.850288Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.850640Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.850843Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.851013Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.851210Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.851374Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:23.867356Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:24.109770Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:24.191705Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:24.191893Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:24.234035Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:24.234840Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:24.235102Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:24.235173Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:24.235229Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:24.235298Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:24.235375Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:24.235443Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:24.236042Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:24.248423Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:24.248532Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:24.278557Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:21:24.278899Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:21:24.348648Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:21:24.351283Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:24.365527Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:21:24.365604Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:21:24.365701Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:24.372648Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:24.377158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:24.395612Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:24.395788Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:24.413469Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:24.570285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:24.658290Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:25.032509Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:25.144872Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:25.144947Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:21:25.775509Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 605Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5444:4712], server id = [2:5445:4713], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:53.647662Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5445:4713] 2026-01-10T12:21:53.647761Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5445:4713], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:53.647810Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:21:53.647916Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:21:53.647980Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5442:4710], StatRequests.size() = 1 2026-01-10T12:21:53.648037Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:53.744511Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5432:4700], ActorId: [2:5433:4701], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDE3NWE2ZTMtYjM4MjYyNy02ZWI3OWQxZS05ODQ4NGY0YQ==, TxId: 2026-01-10T12:21:53.744576Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5432:4700], ActorId: [2:5433:4701], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDE3NWE2ZTMtYjM4MjYyNy02ZWI3OWQxZS05ODQ4NGY0YQ==, TxId: 2026-01-10T12:21:53.744866Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5431:4699], ActorId: [2:5432:4700], Got response [2:5433:4701] SUCCESS 2026-01-10T12:21:53.745086Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:53.758215Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-10T12:21:53.758263Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] 
TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-10T12:21:53.846387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:53.846468Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:53.900671Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5444:4712], schemeshard count = 1 2026-01-10T12:21:54.997983Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:21:54.998037Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-10T12:21:54.998065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:21:55.001091Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:21:55.028049Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:21:55.028523Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:21:55.028587Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:21:55.029559Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:21:55.053997Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:55.054274Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:21:55.054972Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5526:4757], server id = [2:5530:4761], tablet id = 72075186224037899, status = OK 2026-01-10T12:21:55.055277Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5526:4757], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.055589Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5527:4758], server id = [2:5531:4762], tablet id = 72075186224037900, status = OK 2026-01-10T12:21:55.055630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5527:4758], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.056524Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5528:4759], server id = [2:5532:4763], tablet id = 72075186224037901, status = OK 2026-01-10T12:21:55.056582Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5528:4759], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.056924Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5529:4760], server id = [2:5533:4764], tablet id = 72075186224037902, status = OK 2026-01-10T12:21:55.056965Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5529:4760], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.060544Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received 
TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:21:55.061163Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5526:4757], server id = [2:5530:4761], tablet id = 72075186224037899 2026-01-10T12:21:55.061200Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.061762Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:21:55.062178Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5527:4758], server id = [2:5531:4762], tablet id = 72075186224037900 2026-01-10T12:21:55.062209Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.062834Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:21:55.063151Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5529:4760], server id = [2:5533:4764], tablet id = 72075186224037902 2026-01-10T12:21:55.063175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.063952Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:21:55.064011Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:55.064262Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:21:55.064482Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:55.064847Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5546:4773], ActorId: [2:5547:4774], Starting query actor #1 [2:5548:4775] 2026-01-10T12:21:55.064930Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5547:4774], ActorId: [2:5548:4775], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:21:55.066649Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5528:4759], server id = [2:5532:4763], tablet id = 72075186224037901 2026-01-10T12:21:55.066681Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.067366Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5547:4774], ActorId: [2:5548:4775], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTIzODdhODctZjE0OTEzZTMtODhhNDQ1MTEtNjQxZjkwMQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:21:55.160019Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5557:4784]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.160321Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:55.160364Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5557:4784], StatRequests.size() = 1 2026-01-10T12:21:55.303268Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5547:4774], ActorId: [2:5548:4775], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTIzODdhODctZjE0OTEzZTMtODhhNDQ1MTEtNjQxZjkwMQ==, TxId: 2026-01-10T12:21:55.303322Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5547:4774], ActorId: [2:5548:4775], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTIzODdhODctZjE0OTEzZTMtODhhNDQ1MTEtNjQxZjkwMQ==, TxId: 2026-01-10T12:21:55.303538Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5546:4773], ActorId: [2:5547:4774], Got response [2:5548:4775] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:21:55.303835Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5570:4790]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.303988Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:55.304758Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.304806Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:55.305033Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.305088Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:55.305139Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:55.308128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:14.411274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:14.546808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:14.555628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:14.556224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:14.556351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:15.097592Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:15.215218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:15.215384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:15.251558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:15.346909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:16.751974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:16.753021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:16.753073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:16.753107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:16.753673Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:16.820367Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:17.416420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:20.773468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:20.781720Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:20.785355Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:20.824342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:20.824472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:20.865212Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:20.868212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.092952Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.096331Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.097208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.098117Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.098300Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.098664Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.098860Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.098976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.099122Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.278768Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:21.301494Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:21.301619Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:21.316056Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.489701Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:21.533428Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:21.533550Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:21.572177Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:21.577203Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:21.577465Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:21.577535Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:21.577591Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:21.577662Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:21.577724Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:21.577787Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:21.579193Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:21.700034Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.700193Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2208:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.718847Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2240:2634] 2026-01-10T12:21:21.719128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2240:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:21.877448Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2413:2743] 2026-01-10T12:21:21.878023Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:21.880234Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:21.880279Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Creating table 2026-01-10T12:21:21.880345Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:21.883744Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2432:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:21.887276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:21.894264Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:21.894372Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:21.905427Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:22.145751Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:22.373548Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:22.505053Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:22.505141Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Column diff is empty, finishing 2026-01-10T12:21:23.153125Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... rd count = 1 2026-01-10T12:21:51.839881Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:21:51.839940Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-10T12:21:51.839973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:21:51.843191Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:21:51.873638Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:21:51.874421Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:21:51.874508Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:21:51.876007Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-10T12:21:51.901423Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:51.901662Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:21:51.902714Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5590:4768], server id = [2:5594:4772], tablet id = 72075186224037899, status = OK 2026-01-10T12:21:51.903210Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5590:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:51.904399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5591:4769], server id = [2:5595:4773], tablet id = 72075186224037900, status = OK 2026-01-10T12:21:51.904469Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5591:4769], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:51.904957Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5592:4770], server id = [2:5596:4774], tablet id = 72075186224037901, status = OK 2026-01-10T12:21:51.905019Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5592:4770], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:51.905951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5593:4771], server id = [2:5598:4776], tablet id = 72075186224037902, status = OK 2026-01-10T12:21:51.906013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5593:4771], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:51.910327Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:21:51.910829Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5590:4768], server id = [2:5594:4772], tablet id = 72075186224037899 2026-01-10T12:21:51.910861Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:51.911487Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:21:51.911728Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5591:4769], server id = [2:5595:4773], tablet id = 72075186224037900 2026-01-10T12:21:51.911750Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:51.912495Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:21:51.912678Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5592:4770], server id = [2:5596:4774], tablet id = 72075186224037901 2026-01-10T12:21:51.912698Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:51.912832Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:21:51.912858Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:51.912961Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-10T12:21:51.913273Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5593:4771], server id = [2:5598:4776], tablet id = 72075186224037902 2026-01-10T12:21:51.913292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:51.936793Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:51.937019Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-10T12:21:52.429041Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 2 is different from the current 3 2026-01-10T12:21:52.429110Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:21:55.415724Z node 2 :STATISTICS INFO: service_impl.cpp:418: Node 3 is unavailable 2026-01-10T12:21:55.415799Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:55.415953Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 3 is different from the current 0 2026-01-10T12:21:55.415981Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:21:55.416062Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:21:55.416139Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:55.416702Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:21:55.431142Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:55.431330Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2026-01-10T12:21:55.431882Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5714:4829], server id = [2:5715:4830], tablet id = 72075186224037900, status = OK 2026-01-10T12:21:55.431965Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5714:4829], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:55.433045Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:21:55.433097Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:55.433286Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5714:4829], server id = [2:5715:4830], tablet id = 72075186224037900 2026-01-10T12:21:55.433307Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:55.433363Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:21:55.433507Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:55.433965Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5717:4832], ActorId: [2:5718:4833], Starting query actor #1 [2:5719:4834] 2026-01-10T12:21:55.434027Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5718:4833], ActorId: 
[2:5719:4834], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:21:55.436210Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5718:4833], ActorId: [2:5719:4834], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Yzk0ZmNlZWItNjk0OWJlMzItNGQ0NWQ5NzMtMmI0YTdjNmM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:21:55.518918Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5728:4843]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.519121Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:55.519175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5728:4843], StatRequests.size() = 1 2026-01-10T12:21:55.643349Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5718:4833], ActorId: [2:5719:4834], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yzk0ZmNlZWItNjk0OWJlMzItNGQ0NWQ5NzMtMmI0YTdjNmM=, TxId: 2026-01-10T12:21:55.643404Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5718:4833], ActorId: [2:5719:4834], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yzk0ZmNlZWItNjk0OWJlMzItNGQ0NWQ5NzMtMmI0YTdjNmM=, TxId: 2026-01-10T12:21:55.643662Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5717:4832], ActorId: [2:5718:4833], Got response [2:5719:4834] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:21:55.643956Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5741:4849]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:55.644214Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:55.644506Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.644545Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:55.645026Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:55.645089Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:55.645142Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:55.649797Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault >> CrossShardUniqIndexValidationTest::Validation [GOOD] >> IndexBuildTest::RejectsCreate >> FulltextIndexBuildTest::Basic >> VectorIndexBuildTest::Metering_Documentation_Formula [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false >> VectorIndexBuildTest::RecreatedColumns >> VectorIndexBuildTest::CreateAndDrop >> IndexBuildTest::Metering_Documentation_Formula [GOOD] >> IndexBuildTest::LockUniq >> IndexBuildTest::CheckLimitWithDroppedIndexUniq >> IndexBuildTest::CancellationNotEnoughRetries >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table >> TxUsage::WriteToTopic_Demo_14_Query [GOOD] >> TxUsage::WriteToTopic_Demo_16_Table >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> VectorIndexBuildTest::TTxReply_DoExecute_Throws >> IndexBuildTest::LockUniq [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query >> FulltextIndexBuildTest::Basic [GOOD] >> FulltextIndexBuildTest::FlatRelevance ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:14.890459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:15.024891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:15.035007Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:15.035473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:15.035564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:15.535656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:15.688843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:15.688998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:15.726512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:15.847351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:16.751768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:16.752409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:16.752536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:16.752570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:16.753154Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:16.819601Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:17.445394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:20.846958Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:20.855286Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:20.859417Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:20.896007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:20.896178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:20.937049Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:20.940699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.119182Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.119779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.120557Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121248Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121563Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121678Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121757Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.121833Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.260928Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:21.316422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:21.316527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:21.329926Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.500472Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:21.546460Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:21.546621Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:21.580513Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:21.582242Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:21.582464Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:21.582536Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:21.582643Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:21.582701Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:21.582757Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:21.582807Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:21.583603Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:21.714801Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.714963Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.735228Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:21:21.735619Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:21.893440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:21:21.893823Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:21.895661Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:21.895704Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:21:21.895763Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:21.898952Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:21.902367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:21.910059Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:21.910178Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:21.924686Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:22.116837Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:22.336463Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:22.440890Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:22.440973Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:21:23.092950Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ed schemeshards count = 1 2026-01-10T12:21:56.033012Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:21:56.033145Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:21:56.033214Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5477:4713], StatRequests.size() = 1 2026-01-10T12:21:56.033275Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:56.141582Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5467:4703], ActorId: [2:5468:4704], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjRjZTdlYmMtZjBlM2FhYmMtZmEzZGMyNDQtY2E1Zjk1YzE=, TxId: 2026-01-10T12:21:56.141648Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5467:4703], ActorId: [2:5468:4704], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjRjZTdlYmMtZjBlM2FhYmMtZmEzZGMyNDQtY2E1Zjk1YzE=, TxId: 2026-01-10T12:21:56.141949Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5466:4702], ActorId: [2:5467:4703], Got response [2:5468:4704] SUCCESS 2026-01-10T12:21:56.142161Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:56.155820Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-10T12:21:56.155872Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-10T12:21:56.232607Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:56.232678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:56.296574Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5479:4715], schemeshard count = 1 2026-01-10T12:21:57.277679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:21:57.277755Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 
2026-01-10T12:21:57.277793Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:21:57.282428Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:21:57.313746Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:21:57.314538Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:21:57.314617Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:21:57.315977Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2026-01-10T12:21:57.316042Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:63: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2026-01-10T12:21:57.316123Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:58.257905Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:21:58.282294Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:21:58.282539Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:21:58.283280Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5588:4770], server id = [2:5592:4774], tablet id = 72075186224037899, status = OK 2026-01-10T12:21:58.283713Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5588:4770], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:58.283922Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5589:4771], server id = [2:5593:4775], tablet id = 72075186224037900, status = OK 2026-01-10T12:21:58.283992Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5589:4771], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:58.284325Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5590:4772], server id = [2:5594:4776], tablet id = 72075186224037901, status = OK 2026-01-10T12:21:58.284385Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5590:4772], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:58.285234Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5591:4773], server id = [2:5595:4777], tablet id = 72075186224037902, status = OK 2026-01-10T12:21:58.285277Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5591:4773], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:21:58.289392Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:21:58.289685Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:21:58.290316Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, 
node id = 2, client id = [2:5588:4770], server id = [2:5592:4774], tablet id = 72075186224037899 2026-01-10T12:21:58.290358Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:58.290545Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5589:4771], server id = [2:5593:4775], tablet id = 72075186224037900 2026-01-10T12:21:58.290565Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:58.290978Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:21:58.291189Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5590:4772], server id = [2:5594:4776], tablet id = 72075186224037901 2026-01-10T12:21:58.291211Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:58.291356Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:21:58.291388Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:21:58.291497Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:21:58.291601Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:21:58.291889Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5591:4773], server id = [2:5595:4777], tablet id = 72075186224037902 2026-01-10T12:21:58.291914Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:21:58.292123Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5608:4786], ActorId: [2:5609:4787], Starting query actor #1 [2:5610:4788] 2026-01-10T12:21:58.292170Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5609:4787], ActorId: [2:5610:4788], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:21:58.294251Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5609:4787], ActorId: [2:5610:4788], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjhjZDUzMTgtOTRhMmE5ZDQtZGQxMzk3OTktMjA3MjA4NzM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:21:58.369081Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5619:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:58.369298Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:21:58.369330Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5619:4797], StatRequests.size() = 1 2026-01-10T12:21:58.488908Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5609:4787], ActorId: [2:5610:4788], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjhjZDUzMTgtOTRhMmE5ZDQtZGQxMzk3OTktMjA3MjA4NzM=, TxId: 2026-01-10T12:21:58.488965Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5609:4787], ActorId: [2:5610:4788], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjhjZDUzMTgtOTRhMmE5ZDQtZGQxMzk3OTktMjA3MjA4NzM=, TxId: 2026-01-10T12:21:58.489341Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5608:4786], ActorId: [2:5609:4787], Got response [2:5610:4788] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:21:58.489802Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5632:4803]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:58.490144Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:21:58.490857Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:58.490915Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:21:58.491286Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:21:58.491342Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:21:58.491396Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:21:58.496533Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> IndexBuildTest::CheckLimitWithDroppedIndexUniq [GOOD] >> IndexBuildTest::Lock >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsCreateUniq >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 12460303811550967563 2026-01-10T12:17:54.692842Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-10T12:17:54.718847Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-10T12:17:54.718923Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-10T12:17:54.721646Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-10T12:17:54.737789Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:17:54.740641Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-10T12:21:58.593224Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-10T12:21:58.593293Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 
12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-10T12:21:58.755120Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::WithFollowers >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |92.9%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpUserConstraint::KqpReadNull+UploadNull >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query |92.9%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_24_Query >> VectorIndexBuildTest::TTxReply_DoExecute_Throws [GOOD] >> VectorIndexBuildTest::TTxProgress_Throws >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> FulltextIndexBuildTest::FlatRelevance [GOOD] >> IndexBuildTest::BaseCase |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:21.637701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:21.752209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:21.758870Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:21.759155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:21.759214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:22.103049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:22.206088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:22.206246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:22.240637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:22.324381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:22.989225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:22.990772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:22.990834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:22.990868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:22.991035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:23.057022Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:23.659682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:27.153255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:27.161827Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:27.164765Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:27.201457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:27.201607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.232364Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:27.237081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.477717Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.477828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.479268Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.479792Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.480583Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.481446Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.481804Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.482023Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.482183Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.482362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.482505Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:27.501733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.701224Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.752890Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:27.753033Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:27.786536Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:27.787685Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:27.787867Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:27.787910Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:27.787947Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:27.787987Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:27.788023Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:27.788060Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:27.788646Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:27.805992Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.806095Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:27.828728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:27.829861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:27.881557Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:27.884118Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:27.900050Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:27.900127Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:27.900206Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:27.909313Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:27.928683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:27.954235Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:27.954347Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:27.966233Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:28.145673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:28.185215Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:28.509565Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:28.651492Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:28.651590Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:29.345420Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... :21:31.209950Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:21:31.432451Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3161:3253] 2026-01-10T12:21:31.435873Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3159:3464] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-10T12:21:31.435969Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-10T12:21:31.436034Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-10T12:21:31.498285Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-10T12:21:31.540142Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037899 not found 2026-01-10T12:21:31.540610Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037900 not found 2026-01-10T12:21:31.541111Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037902 not found 2026-01-10T12:21:31.541424Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037901 not found 2026-01-10T12:21:31.654552Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:21:31.654642Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:21:31.753183Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3061:3439], schemeshard count = 1 2026-01-10T12:21:35.242321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:21:35.242611Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 1 2026-01-10T12:21:35.242764Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-10T12:21:38.154305Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:21:40.199163Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:21:40.199475Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 2 2026-01-10T12:21:40.199658Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 2 2026-01-10T12:21:43.149628Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:21:43.203761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:43.203841Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:45.264884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:45.264943Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:45.321539Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:21:45.321764Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 3 2026-01-10T12:21:45.321919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-10T12:21:48.329806Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:21:50.317030Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), 
node count = 1, schemeshard count = 1 2026-01-10T12:21:50.317286Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2026-01-10T12:21:50.317434Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:21:53.250489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:21:55.165154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:21:55.165386Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-10T12:21:55.165561Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:21:58.079148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:21:58.221626Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:21:58.221712Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:21:58.221764Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:21:58.221796Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:22:00.098302Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:22:00.098609Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-10T12:22:00.098821Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:22:00.131678Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:22:00.131752Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:22:00.131963Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:22:00.144939Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:22:00.155658Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:22:00.155750Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:667: [72075186224037894] ScheduleNextAnalyze. table [OwnerId: 72075186224037897, LocalPathId: 34] was deleted, deleting its statistics 2026-01-10T12:22:00.156112Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4053:3593], ActorId: [2:4054:3594], Starting query actor #1 [2:4055:3595] 2026-01-10T12:22:00.156173Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4054:3594], ActorId: [2:4055:3595], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:00.158992Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4054:3594], ActorId: [2:4055:3595], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODFkYTMzY2YtYTEwZmMxOWEtNTAxNGIwOS1hNmZjYWMyNA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-10T12:22:00.222235Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4064:3604]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:00.222424Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:00.222488Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4066:3606] 2026-01-10T12:22:00.222534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4066:3606] 2026-01-10T12:22:00.222821Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4067:3607] 2026-01-10T12:22:00.222968Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4066:3606], server id = [2:4067:3607], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:00.223032Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4067:3607], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:00.223069Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:00.223157Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:00.223213Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4064:3604], StatRequests.size() = 1 2026-01-10T12:22:00.223295Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:22:00.327951Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4054:3594], ActorId: [2:4055:3595], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODFkYTMzY2YtYTEwZmMxOWEtNTAxNGIwOS1hNmZjYWMyNA==, TxId: 2026-01-10T12:22:00.328031Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4054:3594], ActorId: [2:4055:3595], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODFkYTMzY2YtYTEwZmMxOWEtNTAxNGIwOS1hNmZjYWMyNA==, TxId: 2026-01-10T12:22:00.328343Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4053:3593], ActorId: [2:4054:3594], Got response [2:4055:3595] SUCCESS 2026-01-10T12:22:00.328730Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:00.341520Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:00.341599Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3159:3464] 2026-01-10T12:22:00.342046Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4087:3619]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:22:00.343974Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:00.344015Z node 2 :STATISTICS ERROR: service_impl.cpp:799: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2026-01-10T12:22:00.344058Z node 2 :STATISTICS DEBUG: service_impl.cpp:1329: ReplyFailed(), request id = 2 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::WithFollowersUniq |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> IndexBuildTest::RejectsCreateUniq [GOOD] >> IndexBuildTest::RejectsDropIndex |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy >> VectorIndexBuildTest::TTxProgress_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Throws |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> IndexBuildTest::WithFollowersUniq [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::RejectsDropIndexUniq >> KqpUserConstraint::KqpReadNull-UploadNull >> VectorIndexBuildTest::RecreatedColumns [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false >> IndexBuildTest::IndexPartitioningIsPersistedUniq [GOOD] >> IndexBuildTest::DropIndex >> VectorIndexBuildTest::CreateAndDrop [GOOD] >> VectorIndexBuildTest::Metering_CommonDB ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:26.779433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:26.873244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:26.878289Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:26.878451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:26.878539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:27.255232Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.360396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.360529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.429528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.558749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:28.223691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:28.224332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:28.224388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:28.224426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:28.225021Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:28.302523Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:28.864516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:32.223522Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:32.232737Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:32.236861Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:32.271893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:32.272029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:32.312501Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:32.314353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:32.500239Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:32.500363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:32.502038Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.502781Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.503595Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.504978Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.505395Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.505563Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.505698Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.505919Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.506080Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.521678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:32.736889Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:32.817432Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:32.817534Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:32.860146Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:32.860953Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:32.861182Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:32.861262Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:32.861326Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:32.861376Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:32.861433Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:32.861506Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:32.862073Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:32.873723Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:32.873843Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:32.899125Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:21:32.899443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:21:32.957880Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:21:32.960114Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:32.972567Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:21:32.972625Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:21:32.972717Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:32.976676Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:32.980821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.988461Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:32.988563Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:32.999765Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:33.154085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:33.225974Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:33.549183Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:33.656983Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:33.657070Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:21:34.258839Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-10T12:22:02.704504Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-10T12:22:02.704545Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-10T12:22:02.704635Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-10T12:22:02.704717Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:02.704838Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:02.704902Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:22:02.704981Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:22:02.705056Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:02.705213Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:02.706361Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:02.706588Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5587:4794] Owner: [2:5586:4793]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:02.706652Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5587:4794] Owner: [2:5586:4793]. 
Column diff is empty, finishing 2026-01-10T12:22:02.707418Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:02.707494Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:02.709022Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:02.709097Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:02.710346Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:02.730370Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5596:4801] 2026-01-10T12:22:02.730567Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5548:4772], server id = [2:5596:4801], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:02.730823Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5596:4801], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:02.731075Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5597:4802] 2026-01-10T12:22:02.731202Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5597:4802], schemeshard id = 72075186224037897 2026-01-10T12:22:02.758331Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:02.758550Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2026-01-10T12:22:02.759198Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5536:4760], server id = [2:5540:4764], tablet id = 72075186224037900 2026-01-10T12:22:02.759247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:02.760023Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5602:4807], server id = [2:5606:4811], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:02.760161Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5602:4807], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:02.760546Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5603:4808], server id = [2:5607:4812], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:02.760612Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5603:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:02.760833Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5604:4809], server id = [2:5608:4813], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:02.760888Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5604:4809], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:02.763098Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5605:4810], server id = [2:5609:4814], tablet id = 72075186224037902, status = OK 
2026-01-10T12:22:02.763173Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5605:4810], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:02.763938Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:02.764503Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5602:4807], server id = [2:5606:4811], tablet id = 72075186224037899 2026-01-10T12:22:02.764541Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:02.765346Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:02.765785Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5604:4809], server id = [2:5608:4813], tablet id = 72075186224037901 2026-01-10T12:22:02.765819Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:02.767017Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:02.767381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5605:4810], server id = [2:5609:4814], tablet id = 72075186224037902 2026-01-10T12:22:02.767416Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:02.768552Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:02.768615Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:02.768985Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:02.769181Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:02.769449Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5616:4820], ActorId: [2:5617:4821], Starting query actor #1 [2:5618:4822] 2026-01-10T12:22:02.769541Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5617:4821], ActorId: [2:5618:4822], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:02.772808Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5603:4808], server id = [2:5607:4812], tablet id = 72075186224037900 2026-01-10T12:22:02.772849Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:02.773808Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5617:4821], ActorId: [2:5618:4822], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OThhYTA2YTMtNDVkZDcxZmEtZTQyOGIyOTktNWY5NmQ3ZTg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:02.895235Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5627:4831]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:02.895628Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:02.895696Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5627:4831], StatRequests.size() = 1 2026-01-10T12:22:03.111279Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5617:4821], ActorId: [2:5618:4822], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OThhYTA2YTMtNDVkZDcxZmEtZTQyOGIyOTktNWY5NmQ3ZTg=, TxId: 2026-01-10T12:22:03.111366Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5617:4821], ActorId: [2:5618:4822], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OThhYTA2YTMtNDVkZDcxZmEtZTQyOGIyOTktNWY5NmQ3ZTg=, TxId: 2026-01-10T12:22:03.111814Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5616:4820], ActorId: [2:5617:4821], Got response [2:5618:4822] SUCCESS 2026-01-10T12:22:03.112288Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:03.127816Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:03.127883Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:22:03.216676Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5646:4839]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:03.217085Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:03.217131Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:03.217315Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:03.217353Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:03.217402Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:03.220364Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> VectorIndexBuildTest::TTxInit_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex >> IndexBuildTest::RejectsDropIndexUniq [GOOD] >> IndexBuildTest::RejectsCancelUniq |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::DropIndexUniq >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:22.966113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:23.077994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:23.088714Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:23.088945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:23.089081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:23.537286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:23.643334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:23.643484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:23.714999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:23.841537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:24.568602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:24.569154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:24.569201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:24.569233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:24.569792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:24.637439Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:25.202178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:28.454789Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:28.462501Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:28.465917Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:28.499899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:28.500018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.539789Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:28.541624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.720160Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.720264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.721826Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.722541Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.723382Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.724224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.724538Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.724658Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.724758Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.724989Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.725161Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.741311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.993820Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:29.016895Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:29.017059Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:29.051259Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:29.052767Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:29.053034Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:29.053098Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:29.053161Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:29.053244Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:29.053303Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:29.053355Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:29.053896Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:29.080466Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:29.080569Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2124:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:29.111248Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2197:2642] 2026-01-10T12:21:29.111625Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2197:2642], schemeshard id = 72075186224037897 2026-01-10T12:21:29.168433Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2249:2666] 2026-01-10T12:21:29.170314Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:21:29.183046Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Describe result: PathErrorUnknown 2026-01-10T12:21:29.183105Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Creating table 2026-01-10T12:21:29.183170Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:29.188140Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2306:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:29.191560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:29.199125Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:29.199248Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:29.213264Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:29.395840Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:29.426782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:21:29.790960Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:29.901402Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:29.901492Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2254:2670] Owner: [2:2253:2669]. Column diff is empty, finishing 2026-01-10T12:21:30.450202Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... = 1 2026-01-10T12:22:03.373395Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:03.373453Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6196:5128], StatRequests.size() = 1 2026-01-10T12:22:03.373505Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:22:03.490864Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6186:5118], ActorId: [2:6187:5119], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjA5NDNjNS1iZjAzMmQ3Mi0xZmQ3ZGYxMS1mNWNkM2NiMA==, TxId: 2026-01-10T12:22:03.490935Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6186:5118], ActorId: [2:6187:5119], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjA5NDNjNS1iZjAzMmQ3Mi0xZmQ3ZGYxMS1mNWNkM2NiMA==, TxId: 2026-01-10T12:22:03.491253Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6185:5117], ActorId: [2:6186:5118], Got response [2:6187:5119] SUCCESS 2026-01-10T12:22:03.491476Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:03.506253Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-10T12:22:03.506301Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:22:03.584923Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:22:03.584991Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:22:03.674279Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6198:5130], schemeshard count = 1 2026-01-10T12:22:04.153896Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-10T12:22:04.153965Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.131000s, at schemeshard: 72075186224037899 2026-01-10T12:22:04.154168Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-10T12:22:04.168053Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:22:04.824264Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:22:04.824343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 32] is column table. 2026-01-10T12:22:04.824380Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-10T12:22:04.828464Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:04.858977Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:04.859793Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:04.859880Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:04.861436Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-10T12:22:04.875162Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:04.875384Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:22:04.876161Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6279:5174], server id = [2:6283:5178], tablet id = 72075186224037905, status = OK 2026-01-10T12:22:04.876425Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6279:5174], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-10T12:22:04.877390Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6280:5175], server id = [2:6284:5179], tablet id = 72075186224037906, status = OK 2026-01-10T12:22:04.877458Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6280:5175], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-10T12:22:04.877793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6282:5177], server id = [2:6285:5180], tablet id = 72075186224037908, status = OK 2026-01-10T12:22:04.877852Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6282:5177], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-10T12:22:04.878604Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6281:5176], server id = [2:6286:5181], tablet id = 72075186224037907, status = OK 2026-01-10T12:22:04.878647Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6281:5176], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-10T12:22:04.882520Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037905 2026-01-10T12:22:04.882952Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6279:5174], server id = [2:6283:5178], tablet id = 72075186224037905 2026-01-10T12:22:04.882984Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:04.883275Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037906 2026-01-10T12:22:04.883857Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6280:5175], server id = [2:6284:5179], tablet id = 72075186224037906 2026-01-10T12:22:04.883882Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:04.884513Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037908 2026-01-10T12:22:04.884858Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6282:5177], server id = [2:6285:5180], tablet id = 72075186224037908 2026-01-10T12:22:04.884884Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:04.885136Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037907 2026-01-10T12:22:04.885166Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:04.885269Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-10T12:22:04.885371Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:04.885711Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6299:5190], ActorId: [2:6300:5191], Starting query actor #1 [2:6301:5192] 2026-01-10T12:22:04.885756Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6300:5191], ActorId: [2:6301:5192], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-10T12:22:04.887239Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6281:5176], server id = [2:6286:5181], tablet id = 72075186224037907 2026-01-10T12:22:04.887262Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:04.887826Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6300:5191], ActorId: [2:6301:5192], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YmQ4OGE2MjQtNTgyZmZhNmItOWEzYTcxYzEtYzM1ZGRlYWI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:04.970607Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6310:5201]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:04.970913Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:04.970965Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6310:5201], StatRequests.size() = 1 2026-01-10T12:22:05.090124Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6300:5191], ActorId: [2:6301:5192], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmQ4OGE2MjQtNTgyZmZhNmItOWEzYTcxYzEtYzM1ZGRlYWI=, TxId: 2026-01-10T12:22:05.090174Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6300:5191], ActorId: [2:6301:5192], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmQ4OGE2MjQtNTgyZmZhNmItOWEzYTcxYzEtYzM1ZGRlYWI=, TxId: 2026-01-10T12:22:05.090459Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6299:5190], ActorId: [2:6300:5191], Got response [2:6301:5192] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:22:05.090773Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6323:5207]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:05.090933Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:05.091560Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:05.091632Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:05.092481Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:05.092525Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-10T12:22:05.092569Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:05.095629Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex [GOOD] >> VectorIndexBuildTest::UnknownState >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:03.934366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:04.056360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:04.069336Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:04.069880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:04.069969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:04.530706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:04.530862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:04.537753Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047721843404 != 1768047721843407 2026-01-10T12:22:04.558279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:04.614953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:04.730425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:05.050362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:05.066756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:05.182996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:05.631234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:05.631338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1080:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:05.631413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:05.635011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:05.635189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:05.643026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:05.761935Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:22:05.839309Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:06.717123Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxpk1v757wjpkt0ppvk0vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmE3M2ZmMjMtZGYyYmEyMjktYjM5YTk3YmQtYjIwZjJmZWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2026-01-10T12:22:06.719369Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxpk1v757wjpkt0ppvk0vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmE3M2ZmMjMtZGYyYmEyMjktYjM5YTk3YmQtYjIwZjJmZWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2026-01-10T12:22:06.726299Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1173:2965], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxpk1v757wjpkt0ppvk0vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmE3M2ZmMjMtZGYyYmEyMjktYjM5YTk3YmQtYjIwZjJmZWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2026-01-10T12:22:06.735534Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NmE3M2ZmMjMtZGYyYmEyMjktYjM5YTk3YmQtYjIwZjJmZWQ=, ActorId: [1:1068:2895], ActorState: ExecuteState, LegacyTraceId: 01kekxpk1v757wjpkt0ppvk0vt, Create QueryResponse for error on request, msg: status# INTERNAL_ERROR issues# { message: "Read from column index 1: got NULL from NOT NULL column" issue_code: 2012 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TxUsage::WriteToTopic_Demo_45_Table >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestTimeRetention >> IndexBuildTest::DropIndexUniq [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndexUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.655342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot 
subscribe to console configs 2026-01-10T12:21:58.655395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.667872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.667944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.668057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.675018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.675226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.675954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.676148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.678206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.678355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.679432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.679492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.679562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.679609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.679653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.679876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.835074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
594046678944, cookie: 105 2026-01-10T12:22:07.886022Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.886053Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-10T12:22:07.886087Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 5 2026-01-10T12:22:07.886469Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.886520Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.886541Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.886561Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-10T12:22:07.886583Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-10T12:22:07.887414Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.887465Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.887488Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.887509Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-10T12:22:07.887531Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:22:07.888427Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.888485Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.888504Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.888831Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.888876Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.888896Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.889409Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:22:07.889460Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:22:07.889634Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-10T12:22:07.889719Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-10T12:22:07.889743Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-10T12:22:07.889767Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-10T12:22:07.889786Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-10T12:22:07.889815Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2026-01-10T12:22:07.890620Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.890671Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:22:07.890690Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:22:07.890710Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-10T12:22:07.890732Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 45] was 4 2026-01-10T12:22:07.890780Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2026-01-10T12:22:07.891589Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2026-01-10T12:22:07.891626Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:22:07.891808Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-10T12:22:07.891929Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-10T12:22:07.891964Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-10T12:22:07.892003Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-10T12:22:07.892032Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-10T12:22:07.892064Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2026-01-10T12:22:07.892145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:821:2777] message: TxId: 105 2026-01-10T12:22:07.892197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-10T12:22:07.892238Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:22:07.892273Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:22:07.892371Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:22:07.892414Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:1 2026-01-10T12:22:07.892436Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:1 2026-01-10T12:22:07.892472Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 2 2026-01-10T12:22:07.892504Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:2 2026-01-10T12:22:07.892531Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:2 2026-01-10T12:22:07.892588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-10T12:22:07.893115Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 105 2026-01-10T12:22:07.893661Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:22:07.894144Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:22:07.894195Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:22:07.894245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:22:07.895942Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:22:07.897026Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:22:07.897073Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1348:3270] TestWaitNotification: OK eventTxId 105 |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [GOOD] >> TGRpcCmsTest::DescribeOptionsTest >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::AlterRemoveTest >> VectorIndexBuildTest::UnknownState [GOOD] >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:19:05.837529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:19:05.837643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:05.837684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:19:05.839127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:19:05.839206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:19:05.839235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:19:05.839857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:19:05.839926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:19:05.840633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:19:05.843268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:19:05.998159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:19:05.998221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:06.012489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:19:06.013167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:19:06.014500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:19:06.026617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:19:06.028447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:19:06.030369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:19:06.032638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:19:06.037989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:06.038189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:19:06.049157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:19:06.049251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:19:06.049368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:19:06.049421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:19:06.049466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:19:06.050504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:19:06.197379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.198927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:19:06.199461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:19:06.199753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 88 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 129 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-10T12:22:07.985014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:22:07.985063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0088 2026-01-10T12:22:07.985192Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:22:07.985233Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:22:08.026575Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.026647Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.026671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-10T12:22:08.026739Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-10T12:22:08.026774Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-10T12:22:08.026866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-10T12:22:08.026910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-10T12:22:08.026933Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-10T12:22:08.027005Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-10T12:22:08.027095Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:22:08.037526Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.037587Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.037612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:22:08.068657Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1118:3082]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:22:08.068940Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 39 2026-01-10T12:22:08.069360Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1118:3082], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 39 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 30 Memory: 124368 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 301 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-10T12:22:08.069423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-10T12:22:08.069472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.003 2026-01-10T12:22:08.069601Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:22:08.069641Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-10T12:22:08.110992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2026-01-10T12:22:08.111071Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-10T12:22:08.111110Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-10T12:22:08.111180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2026-01-10T12:22:08.111204Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-10T12:22:08.111314Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.111344Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.111364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-10T12:22:08.111439Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-10T12:22:08.111463Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-10T12:22:08.111555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 13940 row count 100 2026-01-10T12:22:08.111597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2026-01-10T12:22:08.111631Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-10T12:22:08.111708Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:582: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2026-01-10T12:22:08.111732Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:669: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2026-01-10T12:22:08.111775Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-10T12:22:08.111839Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2026-01-10T12:22:08.122315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.122377Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:22:08.122404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:22:08.330951Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:22:08.331030Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:22:08.331123Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:22:08.331159Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:06.774962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:06.844658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:06.850409Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:06.850684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:06.850747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:07.089393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:07.089505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:07.093482Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047724737967 != 1768047724737970 2026-01-10T12:22:07.104125Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:07.144651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:07.227557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:07.506054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:07.519762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:07.623173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:07.960259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:07.960395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1080:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:07.960455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:07.961114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:07.961243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:07.964814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:08.077890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:22:08.136101Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::UnknownState [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:59.967756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:59.967849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:59.967880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:59.967903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:59.967929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:59.967953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:59.968001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:59.968090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:59.968761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:59.968985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:00.030739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-10T12:22:00.030788Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:00.041550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:00.041646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:00.041779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:00.047611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:00.047805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:00.048328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:00.048503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:00.050047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:00.050224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:00.051224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:00.051279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:00.051336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:00.051384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:00.051417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:00.051573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:00.161059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.161956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:00.162329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 18] was 0 2026-01-10T12:22:09.077847Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-10T12:22:09.077871Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-10T12:22:09.077893Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-10T12:22:09.077914Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-10T12:22:09.077935Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-10T12:22:09.077956Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-10T12:22:09.077978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-10T12:22:09.078000Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-10T12:22:09.078020Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-10T12:22:09.078055Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:22:09.078086Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:22:09.078108Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:22:09.078128Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:22:09.078151Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:22:09.078172Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:22:09.078191Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:22:09.078210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:22:09.078228Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:22:09.078248Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:22:09.078527Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2093: TTxInit for Columns, read records: 10, at schemeshard: 72057594046678944 2026-01-10T12:22:09.078736Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.078867Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:22:09.078919Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:22:09.078955Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:22:09.078977Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:22:09.078998Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-10T12:22:09.079175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:22:09.079464Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.079729Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 12, at schemeshard: 72057594046678944 2026-01-10T12:22:09.080211Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.080283Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:22:09.080468Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.080924Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081022Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081345Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081515Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081613Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081719Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.081916Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.082019Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-10T12:22:09.082590Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.083097Z node 5 :BUILD_INDEX DEBUG: schemeshard_index_build_info.h:626: Restored index build id# 102: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, State: Filling, SubState: None, IsBroken: 1, IsCancellationRequested: 0, Issue: Unknown build kind: 999999, SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:09.083185Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.083296Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.083355Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:09.083848Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 1 tables: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.083946Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.084034Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:09.089842Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:09.096283Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:09.096367Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:09.097628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:09.097700Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:09.097759Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:09.099149Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:1271:3170] 
sender: [5:1333:2058] recipient: [5:15:2062] 2026-01-10T12:22:09.170936Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-10T12:22:09.171218Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } 2026-01-10T12:22:09.172354Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=102&Page=BuildIndexInfo 2026-01-10T12:22:09.172438Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=102&Page=BuildIndexInfo |93.0%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> VectorIndexBuildTest::Metering_CommonDB [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] |93.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeDatashard::AnalyzeOneTable [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> IndexBuildTest::RejectsCancelUniq [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:28.588740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:28.705907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:28.714127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:28.714602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:28.714695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:29.072595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:29.164657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:29.164812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:29.198672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:29.223631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:29.934428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:29.935911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:29.935964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:29.935999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:29.936193Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:30.001725Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:30.558480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:33.740561Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:33.748757Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:33.751624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:33.785998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:33.786120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:33.814352Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:33.816514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:34.003615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:34.003731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:34.005111Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.005567Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.005988Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.007056Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.007548Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.007868Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.008089Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.008351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.008512Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:34.023979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:34.228999Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:34.266907Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:34.267083Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:34.303178Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:34.304737Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:34.305107Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:34.305174Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:34.305227Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:34.305301Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:34.305366Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:34.305420Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:34.306111Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:34.328101Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.328210Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.352878Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:34.354008Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:34.417087Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:34.420410Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:34.438578Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:34.438651Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:34.438740Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:34.450855Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:34.476852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.500984Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:34.501102Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:34.509688Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:34.547826Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:34.717882Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:35.024216Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:35.164766Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:35.164862Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:35.859600Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ner id: 72075186224037897 2026-01-10T12:22:09.114050Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-10T12:22:09.114101Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1768047729011863 2026-01-10T12:22:09.114140Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-10T12:22:09.114180Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-10T12:22:09.114265Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-10T12:22:09.114335Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:09.114431Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:09.114517Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:22:09.114594Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:22:09.114659Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:09.114813Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:09.115999Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:09.116389Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5552:4766] Owner: [2:5551:4765]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:09.116465Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5552:4766] Owner: [2:5551:4765]. Column diff is empty, finishing 2026-01-10T12:22:09.117125Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:09.117201Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2026-01-10T12:22:09.139493Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5561:4773] 2026-01-10T12:22:09.139809Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5561:4773], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:09.139994Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5517:4745], server id = [2:5561:4773], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:09.140277Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5562:4774] 2026-01-10T12:22:09.140403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5562:4774], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2026-01-10T12:22:09.268767Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:09.268896Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:09.270546Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:09.285288Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:09.285471Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:22:09.286442Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5572:4781], server id = [2:5576:4785], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:09.286809Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5572:4781], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:09.287240Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5573:4782], server id = [2:5577:4786], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:09.287302Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5573:4782], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:09.288241Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5574:4783], server id = [2:5578:4787], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:09.288307Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5574:4783], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:09.288730Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5575:4784], server id = [2:5579:4788], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:09.288791Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5575:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:09.294827Z node 2 
:STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:09.295519Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5572:4781], server id = [2:5576:4785], tablet id = 72075186224037899 2026-01-10T12:22:09.295569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:09.297305Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:09.297770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5573:4782], server id = [2:5577:4786], tablet id = 72075186224037900 2026-01-10T12:22:09.297807Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:09.298403Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:09.298722Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5574:4783], server id = [2:5578:4787], tablet id = 72075186224037901 2026-01-10T12:22:09.298752Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:09.299158Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:09.299212Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:09.299452Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:09.299609Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:09.299856Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5592:4797], ActorId: [2:5593:4798], Starting query actor #1 [2:5594:4799] 2026-01-10T12:22:09.299922Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5593:4798], ActorId: [2:5594:4799], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:09.303218Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5575:4784], server id = [2:5579:4788], tablet id = 72075186224037902 2026-01-10T12:22:09.303254Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:09.304130Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5593:4798], ActorId: [2:5594:4799], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTAzMTE3NDAtMmU1ZGJmNWEtMWU1YTEwNGEtY2E2NzE0Njk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:09.426505Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5603:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:09.426882Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:09.426939Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5603:4808], StatRequests.size() = 1 2026-01-10T12:22:09.604195Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5593:4798], ActorId: [2:5594:4799], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTAzMTE3NDAtMmU1ZGJmNWEtMWU1YTEwNGEtY2E2NzE0Njk=, TxId: 2026-01-10T12:22:09.604270Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5593:4798], ActorId: [2:5594:4799], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTAzMTE3NDAtMmU1ZGJmNWEtMWU1YTEwNGEtY2E2NzE0Njk=, TxId: 2026-01-10T12:22:09.604662Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5592:4797], ActorId: [2:5593:4798], Got response [2:5594:4799] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:22:09.605070Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5616:4814]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:09.605399Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:09.605725Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:09.605769Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:09.606011Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:09.606066Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:09.606122Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:09.609359Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:28.257055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:28.382082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:28.393240Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:28.393729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:28.393818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:28.778820Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:28.871054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.871199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.905509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.992231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:29.628437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:29.629376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:29.629420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:29.629443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:29.629563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:29.693559Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:30.251255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:33.652137Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:33.660268Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:33.663109Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:33.698610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:33.698746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:33.727339Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:33.729519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:33.930088Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:33.930259Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:33.931651Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.932117Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.932556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.933382Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.933731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.933967Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.934149Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.934339Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.934441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.950092Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:34.189103Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:34.239917Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:34.240010Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:34.276721Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:34.278509Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:34.278758Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:34.278838Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:34.278902Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:34.278963Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:34.279021Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:34.279077Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:34.279816Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:34.302590Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.302708Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.329625Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:34.330808Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:34.395946Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:34.399522Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:34.420068Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:34.420164Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:34.420252Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:34.432658Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:34.455159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.481778Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:34.481932Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:34.492133Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:34.678929Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:34.712964Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:35.041187Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:35.181275Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:35.181348Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:35.871262Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 2026-01-10T12:22:01.301452Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:01.307557Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:3988:3531], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:01.310964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:01.353075Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:3970:3525], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:22:01.565408Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4057:3573], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:01.613781Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4056:3572] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:01.851508Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4078:3586]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:01.851773Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:01.851871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4080:3588] 2026-01-10T12:22:01.851937Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4080:3588] 2026-01-10T12:22:01.852349Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4081:3589] 2026-01-10T12:22:01.852456Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4080:3588], server id = [2:4081:3589], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:01.852547Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4081:3589], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:01.852608Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:01.852738Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:01.852821Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4078:3586], StatRequests.size() = 1 2026-01-10T12:22:01.852895Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:22:06.787146Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:3943:3511], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:06.787320Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:3943:3511], Start read next stream part 2026-01-10T12:22:06.787534Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33160, txId: 281474976720660] shutting down 2026-01-10T12:22:06.787609Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:06.789751Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:22:06.789840Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], Start read next stream part 2026-01-10T12:22:06.790009Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:3943:3511], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:06.790047Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:3943:3511], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTNkMTA5NWQtYTY4MmQxNjctMjI5YWQzMTMtNzgzZGU1NzI=, TxId: 2026-01-10T12:22:06.851852Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4184:3669]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:06.852018Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:06.852054Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4184:3669], StatRequests.size() = 1 2026-01-10T12:22:09.748555Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:09.748679Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], Start read next stream part 2026-01-10T12:22:09.749090Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33180, txId: 281474976720663] shutting down 2026-01-10T12:22:09.750330Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4252:3720], ActorId: [2:4253:3721], Starting query actor #1 [2:4254:3722] 2026-01-10T12:22:09.750394Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4253:3721], ActorId: [2:4254:3722], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:09.750836Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:09.750888Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3933:2462], ActorId: [2:4160:3649], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmJmOGQzMjMtNWM5MzVhMmEtNDllZDNjYTYtODVhNTdmMzE=, TxId: 2026-01-10T12:22:09.754326Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4253:3721], ActorId: [2:4254:3722], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmVlOGQwMzYtYTZiOGEyZGMtM2VmNGZlYjYtMTZiZmM0MjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:09.807382Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4266:3733]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:09.807588Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:09.807617Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4266:3733], StatRequests.size() = 1 2026-01-10T12:22:09.932707Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4253:3721], ActorId: [2:4254:3722], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmVlOGQwMzYtYTZiOGEyZGMtM2VmNGZlYjYtMTZiZmM0MjI=, TxId: 2026-01-10T12:22:09.932803Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4253:3721], ActorId: [2:4254:3722], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmVlOGQwMzYtYTZiOGEyZGMtM2VmNGZlYjYtMTZiZmM0MjI=, TxId: 2026-01-10T12:22:09.933160Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4252:3720], ActorId: [2:4253:3721], Got response [2:4254:3722] SUCCESS 2026-01-10T12:22:09.933776Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:09.947369Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:09.947448Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3051:3427] 2026-01-10T12:22:09.948110Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4288:3747]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:22:09.948419Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:22:09.948476Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:09.948768Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:22:09.948831Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:09.948887Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:09.949142Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:22:09.952890Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:22:09.953955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-10T12:22:09.954034Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_16_Table [GOOD] >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] >> TPQTest::TestTabletRestoreEventsOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-10T12:20:37.503278Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-10T12:20:37.503338Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-10T12:20:37.503496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-10T12:20:37.503535Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:6:2053] Successful handshake: owner# 800, generation# 1 2026-01-10T12:20:37.503613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:3:2050] 
Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-10T12:20:37.503659Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:3:2050] Commit generation: owner# 800, generation# 1 2026-01-10T12:20:37.503875Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-10T12:20:37.503910Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:6:2053] Commit generation: owner# 800, generation# 1 2026-01-10T12:20:37.503991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.504326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2069] 2026-01-10T12:20:37.504367Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:3:2050] Upsert description: path# /root/tenant 2026-01-10T12:20:37.504443Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:3:2050] Subscribe: subscriber# [1:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.504539Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:44:2069] 2026-01-10T12:20:37.504559Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# /root/tenant 2026-01-10T12:20:37.504582Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.504651Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:45:2069] 2026-01-10T12:20:37.504668Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:9:2056] Upsert description: path# /root/tenant 2026-01-10T12:20:37.504687Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:9:2056] Subscribe: subscriber# [1:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.504735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2026-01-10T12:20:37.504789Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2069] 2026-01-10T12:20:37.504833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2026-01-10T12:20:37.504858Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:44:2069] 2026-01-10T12:20:37.504882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:45:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2026-01-10T12:20:37.504912Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:45:2069] 
2026-01-10T12:20:37.504976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2069] 2026-01-10T12:20:37.505020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:41:2069] 2026-01-10T12:20:37.505048Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:39:2069][/root/tenant] Set up state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:20:37.505103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:42:2069] 2026-01-10T12:20:37.505134Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:39:2069][/root/tenant] Ignore empty state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2026-01-10T12:20:37.505277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:36:2066], cookie# 0, event size# 103 2026-01-10T12:20:37.505305Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-10T12:20:37.505346Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-10T12:20:37.505446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2026-01-10T12:20:37.505484Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:43:2069] 2026-01-10T12:20:37.505527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:40:2069] 2026-01-10T12:20:37.505568Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:39:2069][/root/tenant] Update to strong state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } 
PathId: 1 PathOwnerId: 900 2026-01-10T12:20:37.963788Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-10T12:20:37.963866Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-10T12:20:37.964008Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-10T12:20:37.964037Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:3:2050] Commit generation: owner# 800, generation# 1 2026-01-10T12:20:37.964094Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-10T12:20:37.964120Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:6:2053] Successful handshake: owner# 900, generation# 1 2026-01-10T12:20:37.964263Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-10T12:20:37.964301Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:6:2053] Commit generation: owner# 900, generation# 1 2026-01-10T12:20:37.964455Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:20:37.964807Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2069] 2026-01-10T12:20:37.964837Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:3:2050] Upsert description: path# /root/tenant 2026-01-10T12:20:37.964914Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:3:2050] Subscribe: subscriber# [3:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.965062Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:44:2069] 2026-01-10T12:20:37.965088Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:6:2053] Upsert description: path# /root/tenant 2026-01-10T12:20:37.965136Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:6:2053] Subscribe: subscriber# [3:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.965267Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:45:2069] 2026-01-10T12:20:37.965290Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:9:2056] Upsert description: path# /root/tenant 2026-01-10T12:20:37.965332Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:9:2056] Subscribe: subscriber# [3:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:20:37.965380Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2026-01-10T12:20:37.965422Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2069] 
2026-01-10T12:20:37.965477Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2026-01-10 ... 94: [399:3:2050] Commit generation: owner# 910, generation# 1 2026-01-10T12:22:11.534939Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-10T12:22:11.534970Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [399:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-10T12:22:11.535143Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-10T12:22:11.535176Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [399:6:2053] Commit generation: owner# 910, generation# 1 2026-01-10T12:22:11.535343Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][399:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[399:24339059:0], [399:1099535966835:0], [399:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:22:11.535793Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2069] 2026-01-10T12:22:11.535829Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.535901Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:3:2050] Subscribe: subscriber# [399:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.536062Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:44:2069] 2026-01-10T12:22:11.536107Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.536164Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:6:2053] Subscribe: subscriber# [399:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.536328Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:45:2069] 2026-01-10T12:22:11.536358Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.536400Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:9:2056] Subscribe: subscriber# [399:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.536474Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2026-01-10T12:22:11.536555Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2069] 2026-01-10T12:22:11.536618Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:44:2069][/Root/Tenant/table_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2026-01-10T12:22:11.536660Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:44:2069] 2026-01-10T12:22:11.536717Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2026-01-10T12:22:11.536763Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:45:2069] 2026-01-10T12:22:11.536850Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2069] 2026-01-10T12:22:11.536922Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:41:2069] 2026-01-10T12:22:11.536973Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][399:39:2069][/Root/Tenant/table_inside] Set up state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:22:11.537032Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:42:2069] 2026-01-10T12:22:11.537075Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][399:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2026-01-10T12:22:11.986686Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-10T12:22:11.986746Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:3:2050] Successful handshake: owner# 910, generation# 1 2026-01-10T12:22:11.986837Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-10T12:22:11.986858Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:3:2050] Commit generation: owner# 910, generation# 1 2026-01-10T12:22:11.986888Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-10T12:22:11.986907Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-10T12:22:11.987022Z node 
401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-10T12:22:11.987043Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:6:2053] Commit generation: owner# 910, generation# 1 2026-01-10T12:22:11.987127Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][401:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[401:24339059:0], [401:1099535966835:0], [401:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:22:11.987425Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:43:2069] 2026-01-10T12:22:11.987448Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.987500Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:3:2050] Subscribe: subscriber# [401:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.987623Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:44:2069] 2026-01-10T12:22:11.987643Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.987681Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:6:2053] Subscribe: subscriber# [401:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.987782Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:45:2069] 2026-01-10T12:22:11.987798Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-10T12:22:11.987822Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:9:2056] Subscribe: subscriber# [401:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-10T12:22:11.987883Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:3:2050] 2026-01-10T12:22:11.987930Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:43:2069] 2026-01-10T12:22:11.987961Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:6:2053] 2026-01-10T12:22:11.987988Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:44:2069] 2026-01-10T12:22:11.988017Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:9:2056] 2026-01-10T12:22:11.988041Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 
0 }: sender# [401:45:2069] 2026-01-10T12:22:11.988111Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:40:2069] 2026-01-10T12:22:11.988164Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:41:2069] 2026-01-10T12:22:11.988215Z node 401 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][401:39:2069][/Root/Tenant/table_inside] Set up state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:22:11.988257Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:42:2069] 2026-01-10T12:22:11.988286Z node 401 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][401:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::BaseCaseUniq >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true >> TxUsage::WriteToTopic_Demo_27_Table >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> TGRpcCmsTest::AuthTokenTest [GOOD] |93.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2026-01-10T12:22:13.206746Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.215704Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2026-01-10T12:22:13.219819Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.219997Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2026-01-10T12:22:13.220066Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.220645Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-10T12:22:13.221823Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2026-01-10T12:22:13.221901Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.221985Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2026-01-10T12:22:13.222017Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.222070Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-10T12:22:13.222098Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.222136Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2026-01-10T12:22:13.222167Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.222202Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2026-01-10T12:22:13.222254Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.222317Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-10T12:22:13.222436Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-10T12:22:13.222516Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2026-01-10T12:22:13.222558Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.222598Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, 
client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-10T12:22:13.222614Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.222638Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-10T12:22:13.222653Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.222675Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 7 2026-01-10T12:22:13.222721Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2026-01-10T12:22:13.222737Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.233556Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 1 has already been processed 2026-01-10T12:22:13.233636Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 2 2026-01-10T12:22:13.233679Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-10T12:22:13.233743Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 3 has already been processed 2026-01-10T12:22:13.233807Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 4 2026-01-10T12:22:13.233824Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-10T12:22:13.233868Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 5 has already been processed 2026-01-10T12:22:13.233887Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 6 2026-01-10T12:22:13.233904Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 6 is not local. 
2026-01-10T12:22:13.233946Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:13.234048Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.234081Z node 1 :STATISTICS DEBUG: service_impl.cpp:1027: Skip TEvStatisticsRequestTimeout 2026-01-10T12:22:13.234142Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-10T12:22:13.234168Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.234211Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-10T12:22:13.234233Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.234292Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-10T12:22:13.234311Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_16_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2026-01-10T12:22:09.501791Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707177179739110:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:09.501854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:09.755724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:09.793174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:09.793302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:09.808953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:09.855636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:09.977198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:10.019155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:10.019188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:10.019194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:10.019281Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:10.397795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:10.462401Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7593707181474707419:2301], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:47396" } 2026-01-10T12:22:10.462463Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-10T12:22:10.462482Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.462525Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.462767Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:47396" 2026-01-10T12:22:10.463452Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1768047730463122) 2026-01-10T12:22:10.506528Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1768047730463122 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-10T12:22:10.506796Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-10T12:22:10.508285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:10.511223Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-10T12:22:10.512702Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730463122&action=1" } } } 2026-01-10T12:22:10.513561Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.513677Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:22:10.513859Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:22:10.514290Z node 1 :CMS_TENANTS 
DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-10T12:22:10.514458Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:22:10.520490Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593707181474707436:2303], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730463122&action=1" } UserToken: "" } 2026-01-10T12:22:10.520535Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:10.520762Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730463122&action=1" } } 2026-01-10T12:22:10.520882Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-10T12:22:10.520983Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.521048Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7593707181474707433:2209], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.521084Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.521095Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.521109Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.521157Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-10T12:22:10.521175Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-10T12:22:10.521225Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-10T12:22:10.523893Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:10.523922Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.523929Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.523955Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.524009Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-10T12:22:10.524112Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1768047730463122 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:10.526418Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:22:10.553788Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.553891Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-10T12:22:10.553903Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-10T12:22:10.569523Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:47396" 2026-01-10T12:22:10.571703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:22:10.574351Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593707181474707502:2306], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730463122&action=1" } UserToken: "" } 2026-01-10T12:22:10.574382Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:10.574554Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730463122&action=1" } } 2026-01-10T12:22:10.574821Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-10T12:22:10.574918Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715 ... 
te.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:10.642047Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.642158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp:342) 2026-01-10T12:22:10.644454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5826: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 39] by tx: 281474976715660 2026-01-10T12:22:10.645048Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2026-01-10T12:22:10.645067Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:22:10.645089Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.645180Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2026-01-10T12:22:10.645211Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715660 2026-01-10T12:22:10.645283Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7593707181474707541:2209], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.645306Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.645323Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3652: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2026-01-10T12:22:10.647677Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715660 2026-01-10T12:22:10.654695Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-10T12:22:10.654813Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-01-10T12:22:10.654854Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-10T12:22:10.654905Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-01-10T12:22:10.655047Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2026-01-10T12:22:10.655069Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-01-10T12:22:10.655095Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 
THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-10T12:22:10.655127Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2026-01-10T12:22:10.655164Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2026-01-10T12:22:10.656884Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2026-01-10T12:22:10.656908Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:22:10.656959Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:10.657079Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7593707181474707614:2209], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:10.657106Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:10.657119Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.657125Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.657161Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-10T12:22:10.657206Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1768047730636181 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:10.657244Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047730636181 issue= 2026-01-10T12:22:10.662603Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-10T12:22:10.662658Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.662673Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.663013Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707177179739401:2208], Recipient [1:7593707177179739541:2209]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.663030Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.663045Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.663054Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.663080Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-10T12:22:10.663102Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1768047730636181 errorcode=STATUS_CODE_UNSPECIFIED 
issue= 2026-01-10T12:22:10.664962Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:22:10.665011Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.665036Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:22:10.665139Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:22:10.665699Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-10T12:22:10.665816Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-10T12:22:10.669398Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-10T12:22:10.669495Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7593707181474707753:2209], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-10T12:22:10.670510Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-10T12:22:10.670535Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.670546Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.670586Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-10T12:22:10.670612Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-10T12:22:10.677981Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:10.678024Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.678031Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.678037Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.678115Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1768047730636181 2026-01-10T12:22:10.678136Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047730636181 issue= 2026-01-10T12:22:10.678168Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1768047730636181 issue= 2026-01-10T12:22:10.678187Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-10T12:22:10.678258Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1768047730636181 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:10.680094Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-10T12:22:10.680153Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.693157Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593707181474707771:2316], Recipient [1:7593707177179739541:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730636181&action=2" } UserToken: "" } 2026-01-10T12:22:10.693182Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:10.693318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730636181&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-10T12:22:13.215032Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.216875Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-10T12:22:13.219826Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.220643Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-10T12:22:13.221827Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-10T12:22:13.221864Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.222887Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.223151Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.223353Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-10T12:22:13.223433Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.223583Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-10T12:22:13.223627Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-10T12:22:13.223660Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:13.223733Z node 3 :STATISTICS DEBUG: 
service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-10T12:22:13.223756Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.223813Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-10T12:22:13.223839Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.223880Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-10T12:22:13.223910Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-10T12:22:13.223925Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:13.223985Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-10T12:22:13.223998Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.224050Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-10T12:22:13.234387Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.234466Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:22:13.234511Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.234532Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:22:13.245156Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-10T12:22:13.245219Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-10T12:22:13.245243Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:13.245341Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.245379Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-10T12:22:13.245407Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.245425Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-10T12:22:13.245547Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-10T12:22:13.245592Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2026-01-10T12:22:09.499953Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707175413103954:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:09.500100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:09.732456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:09.782708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:09.782796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:09.796739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:09.850074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:09.897294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:10.019067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:10.019173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:10.019179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:10.019245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:10.397699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:10.509074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:10.553967Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2026-01-10T12:22:10.553990Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2026-01-10T12:22:10.557588Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2026-01-10T12:22:10.594466Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7593707179708072351:2303], Recipient [1:7593707175413104488:2210]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:54046" } 2026-01-10T12:22:10.594503Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2026-01-10T12:22:10.598335Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { 
kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: 2026-01-10T12:18:23.788765Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:23.869655Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:23.869746Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:23.869827Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:23.869948Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:23.891164Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:23.919474Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-10T12:18:23.920532Z node 1 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:23.922593Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:23.924519Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-10T12:18:23.925877Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-10T12:18:23.935525Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:23.935942Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5e142fea-1abdbb32-65d5d148-a67c4a81_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 100 2026-01-10T12:18:23.948230Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:23.948639Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4fa953-83cc82ff-e485b708-d932ffa4_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:23.965115Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:23.965606Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b5f2aaa-b124c73c-9202be0e-366a1ed3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-10T12:18:23.983645Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:23.984062Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|710c69d4-a9fede8b-7abd31a1-21c05324_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-10T12:18:23.985414Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:23.987755Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e3c68c0-1466f075-254f9be0-39e129d5_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-10T12:18:24.024927Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-10T12:18:24.025387Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1e7c7234-c7aea3c3-11a3bbd-3528cca2_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-10T12:18:24.501142Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 
72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-01-10T12:18:24.566233Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.566306Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.566399Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.566470Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:185:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:188:2057] recipient: [2:187:2197] Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:190:2057] recipient: [2:187:2197] 2026-01-10T12:18:24.606717Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:24.606782Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:24.606826Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:24.606867Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! 
new actor is[2:189:2198] 2026-01-10T12:18:24.628050Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.690608Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.721767Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.732463Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.767951Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.824327Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.866482Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:24.988381Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.024347Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.311352Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.360326Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.695961Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:25.968395Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:18:26.046880Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:270:2057] recipient: [2:14:2061] 2026-01-10T12:18:26.239632Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:26.240549Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } 
Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-10T12:18:26.241396Z node 2 :PERSQUEUE INFO: partition_init.c ... 2026-01-10T12:22:12.043697Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:589:2577] connected; active server actors: 1 2026-01-10T12:22:12.045750Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:594:2582] connected; active server actors: 1 2026-01-10T12:22:12.047842Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:599:2587] connected; active server actors: 1 2026-01-10T12:22:12.050072Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:604:2592] connected; active server actors: 1 2026-01-10T12:22:12.052288Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:609:2597] connected; active server actors: 1 2026-01-10T12:22:12.054632Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:614:2602] connected; active server actors: 1 2026-01-10T12:22:12.056835Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:619:2607] connected; active server actors: 1 2026-01-10T12:22:12.059017Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:624:2612] connected; active server actors: 1 2026-01-10T12:22:12.061224Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:629:2617] connected; active server actors: 1 2026-01-10T12:22:12.063401Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:634:2622] connected; active server actors: 1 2026-01-10T12:22:12.065714Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:639:2627] connected; active server actors: 1 2026-01-10T12:22:12.067826Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:644:2632] connected; active server actors: 1 2026-01-10T12:22:12.069869Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:649:2637] connected; active server actors: 1 2026-01-10T12:22:12.071677Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:654:2642] connected; active server actors: 1 2026-01-10T12:22:12.074017Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:659:2647] connected; active server actors: 1 2026-01-10T12:22:12.075969Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:664:2652] connected; active server actors: 1 2026-01-10T12:22:12.077989Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:669:2657] connected; active server actors: 1 2026-01-10T12:22:12.079812Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: 
[72057594037927938][rt3.dc1--topic] pipe [156:674:2662] connected; active server actors: 1 2026-01-10T12:22:12.081614Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:679:2667] connected; active server actors: 1 2026-01-10T12:22:12.083500Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:684:2672] connected; active server actors: 1 2026-01-10T12:22:12.085168Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:689:2677] connected; active server actors: 1 2026-01-10T12:22:12.086752Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:694:2682] connected; active server actors: 1 2026-01-10T12:22:12.088265Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:699:2687] connected; active server actors: 1 2026-01-10T12:22:12.089927Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:704:2692] connected; active server actors: 1 2026-01-10T12:22:12.091676Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:709:2697] connected; active server actors: 1 2026-01-10T12:22:12.094150Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:714:2702] connected; active server actors: 1 2026-01-10T12:22:12.096658Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:719:2707] connected; active server actors: 1 2026-01-10T12:22:12.099007Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:724:2712] connected; active server actors: 1 2026-01-10T12:22:12.101557Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:729:2717] connected; active server actors: 1 2026-01-10T12:22:12.103942Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:734:2722] connected; active server actors: 1 2026-01-10T12:22:12.106477Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:739:2727] connected; active server actors: 1 2026-01-10T12:22:12.108866Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:744:2732] connected; active server actors: 1 2026-01-10T12:22:12.111249Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:749:2737] connected; active server actors: 1 2026-01-10T12:22:12.113822Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:754:2742], now have 1 active actors on pipe 2026-01-10T12:22:12.115306Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:757:2745], now have 1 active actors on pipe 2026-01-10T12:22:12.116529Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:760:2748], now have 1 active actors on pipe 2026-01-10T12:22:12.117770Z node 156 
:PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:763:2751] connected; active server actors: 1 2026-01-10T12:22:12.610024Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:22:12.676063Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:22:12.676171Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:22:12.676258Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:22:12.676348Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:22:12.731718Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:22:12.731815Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:22:12.731898Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:22:12.731985Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:22:12.732440Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:22:12.736402Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:22:12.737930Z node 157 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 163 actor [157:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 163 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 163 } Consumers { Name: "aaa" Generation: 163 Important: true } 2026-01-10T12:22:12.739130Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:249:2196] 2026-01-10T12:22:12.740620Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 3 [157:249:2196] 2026-01-10T12:22:12.742408Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:251:2196] 2026-01-10T12:22:12.743412Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 3 [157:251:2196] 2026-01-10T12:22:12.788130Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:22:12.788234Z node 157 :PQ_TX INFO: 
pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:22:12.790109Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:22:12.790202Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:22:12.791339Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:327:2254] 2026-01-10T12:22:12.793021Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:329:2254] 2026-01-10T12:22:12.798693Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:22:12.799977Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 4 [157:327:2254] 2026-01-10T12:22:12.800310Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:22:12.800913Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 4 [157:329:2254] |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.624259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.624316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.636382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.636464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.636583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.644781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.647008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.650335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.654292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.661736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.662744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.671487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.671684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.671731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.671991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.796858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1677:3548], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10285, Billed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10285} 2026-01-10T12:22:12.673771Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-10T12:22:12.673890Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:22:12.673940Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:2330:4107] TestWaitNotification: OK eventTxId 107 2026-01-10T12:22:12.674622Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:22:12.674872Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 276us result status StatusSuccess 2026-01-10T12:22:12.675386Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "prefix" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 20 levels: 2 overlap_clusters: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:22:12.676138Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-01-10T12:22:12.676428Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" took 319us result status StatusSuccess 2026-01-10T12:22:12.677404Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 35 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:22:12.682604Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830210, Sender [6:3451:5176], Recipient [6:1755:3619]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "(\n (let range \'(\'(\'__ydb_parent (Null) (Void)) \'(\'key (Null) (Void)) ))\n (let columns \'(\'__ydb_parent))\n (let result (SelectRange \'__user__indexImplPostingTable range columns \'()))\n (return (AsList (SetResult \'Result result) ))\n )" } } ... posting table contains 10 rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2026-01-10T12:22:13.631421Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.632447Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-10T12:22:13.632791Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.633006Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2026-01-10T12:22:13.633056Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.633348Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.633476Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:13.633623Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-10T12:22:13.633850Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2026-01-10T12:22:13.633906Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.634045Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2026-01-10T12:22:13.634084Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.634169Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: 
EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2026-01-10T12:22:13.634215Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.634297Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-10T12:22:13.634444Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-10T12:22:13.634506Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-10T12:22:13.634551Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:13.634651Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-10T12:22:13.634716Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-10T12:22:13.634739Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.634799Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-10T12:22:13.634817Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.634903Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-10T12:22:13.634936Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.634961Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-10T12:22:13.635015Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-10T12:22:13.635032Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.635079Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2026-01-10T12:22:13.635119Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-10T12:22:13.635314Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-10T12:22:13.635341Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.635385Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-10T12:22:13.635440Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:13.635514Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-10T12:22:13.635667Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-10T12:22:13.635689Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:13.635771Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received 
TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-10T12:22:13.635823Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:13.635994Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-10T12:22:13.636036Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query [GOOD] >> LocalPartitionReader::Booting |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> LocalPartitionReader::Booting [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TxUsage::WriteToTopic_Demo_11_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2026-01-10T12:22:09.500824Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707176059500917:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:09.501153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:09.716355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:09.797275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:09.797389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:09.800724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:09.848521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:09.899264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:10.019053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:10.019102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:10.019117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:10.019191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:10.402742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:10.461764Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7593707180354469295:2301], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.461818Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-10T12:22:10.461835Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.461846Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.462043Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" 2026-01-10T12:22:10.463422Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1768047730462955) 2026-01-10T12:22:10.506059Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1768047730462955 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-10T12:22:10.506289Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-10T12:22:10.509663Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:10.511164Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-10T12:22:10.512837Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730462955&action=1" } } } 2026-01-10T12:22:10.513597Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.513694Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:22:10.513864Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 
2026-01-10T12:22:10.514246Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-10T12:22:10.514412Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:22:10.520470Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593707180354469313:2303], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730462955&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-10T12:22:10.520506Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:10.520738Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047730462955&action=1" } } 2026-01-10T12:22:10.520845Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-10T12:22:10.520906Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.520985Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7593707180354469309:2206], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.521008Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:10.521025Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.521035Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.521080Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-10T12:22:10.521105Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-10T12:22:10.521159Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-10T12:22:10.523515Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:10.523544Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.523552Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.523559Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.523604Z node 1 
:CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-10T12:22:10.523640Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1768047730462955 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:10.525421Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:22:10.537143Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.537255Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-10T12:22:10.537267Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-10T12:22:10.551375Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:39602" 2026-01-10T12:22:10.554540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:22:10.557145Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-10T12:22:10.557234Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-10T12:22:10.560496Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-10T12:22:10.567279Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNo ... 
} } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.793597Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593707180354469931:2377], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.793627Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:22:10.793698Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.794365Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707176059501263:2202], Recipient [1:7593707176059501399:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.794393Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.795026Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.798205Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593707180354469946:2378], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.798239Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:22:10.798314Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.798491Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707176059501263:2202], Recipient [1:7593707176059501399:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.798514Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.799072Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.802869Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender 
[1:7593707180354469956:2379], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.802905Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:22:10.802939Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.803023Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707176059501263:2202], Recipient [1:7593707176059501399:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.803047Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.803682Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.804061Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2026-01-10T12:22:10.804097Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:22:10.804136Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.804243Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7593707180354469413:2206], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.804272Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-01-10T12:22:10.804317Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:10.804340Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:10.804393Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2026-01-10T12:22:10.804427Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1768047730462955 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:10.804489Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2904: Update database for /Root/users/user-1 confirmedsubdomain=2 2026-01-10T12:22:10.805439Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593707180354469970:2380], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.805471Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:22:10.805533Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.805677Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707176059501263:2202], Recipient [1:7593707176059501399:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.805702Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.806201Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.806961Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2026-01-10T12:22:10.807009Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:10.808194Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7593707180354469977:2381], Recipient [1:7593707176059501399:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39602" } 2026-01-10T12:22:10.808214Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-10T12:22:10.808242Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.808383Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707176059501263:2202], Recipient [1:7593707176059501399:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:10.808419Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:10.808956Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-10T12:22:10.972370Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:22:10.972966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2026-01-10T12:22:11.578971Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:11.583817Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:12.579665Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:13.028608Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:13.028889Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:13.030079Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7593707194072154643:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |93.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> AggregateStatistics::ShouldBePings >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table >> AggregateStatistics::ShouldBePings [GOOD] |93.0%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2026-01-10T12:22:14.922534Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:14.923063Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-10T12:22:15.026842Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-10T12:22:15.026904Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-10T12:22:15.026952Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-10T12:22:15.027514Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-10T12:22:15.027567Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:15.027643Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-10T12:22:15.027679Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-10T12:22:15.027717Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-10T12:22:15.027757Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.624259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.624310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.635199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.635271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.636051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.645094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.647014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.650341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.654261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.661671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.662691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.671453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.671640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.671674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.671841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.822558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.823575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.823714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.823804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.823876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.823936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.824693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... DROP LOCK, path: /MyRoot/ServerLessDB/Table/test_index/indexImplTable 2026-01-10T12:22:14.216375Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976725762, status# StatusPathDoesNotExist 2026-01-10T12:22:14.216420Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549 2026-01-10T12:22:14.216484Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3152: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 107, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-10T12:22:14.216623Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3157: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500), SubscribersCount: 1, CreateSender: [6:1673:3544], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 
281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-10T12:22:14.216777Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-10T12:22:14.218248Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected 2026-01-10T12:22:14.218331Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/Table/test_index', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760, SubscribersCount: 1, CreateSender: [6:1673:3544], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:14.218382Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-10T12:22:14.218485Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:22:14.218520Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:1690:3561] TestWaitNotification: OK eventTxId 107 2026-01-10T12:22:14.219059Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-10T12:22:14.219286Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: 
"/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-10T12:22:14.219837Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:22:14.220034Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 216us result status StatusSuccess 2026-01-10T12:22:14.220461Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:22:14.221032Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 108 DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-10T12:22:14.221267Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:102: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 108 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 108 Status: SUCCESS 2026-01-10T12:22:14.223618Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" PageSize: 100 PageToken: "" 2026-01-10T12:22:14.223715Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table >> KqpProxy::InvalidSessionID >> TableCreation::MultipleTablesCreation >> TableCreation::ConcurrentTableCreation >> TableCreation::UpdateTableWithAclRollback >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> KqpProxy::PassErrroViaSessionActor >> TFlatTest::SplitEmptyTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:21:34.707234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:34.810074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:34.818105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:34.818555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:34.818650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:35.153871Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:35.252960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:35.253102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:35.287743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:35.368053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:36.002288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:36.003330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:36.003374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:36.003396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:36.003521Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:36.065812Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:36.611991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:39.196334Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:39.202250Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:39.204408Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:39.232782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:39.232929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:39.260720Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:39.262543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:39.436843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:39.436931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:39.438151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.438607Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.439027Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.439993Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.440425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.440686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.440848Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.441100Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.441259Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:39.457483Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:39.653852Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:39.688848Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:39.688961Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:39.714643Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:39.715895Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:39.716126Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:39.716174Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:39.716219Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:39.716292Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:39.716346Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:39.716384Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:39.716912Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:39.734141Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:39.734256Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:39.756334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:39.757244Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:39.808679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:39.811494Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:39.830734Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:39.830796Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:39.830875Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:39.841970Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:39.864833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:39.889229Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:39.889372Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:39.900009Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:40.082201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:40.094298Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:40.387472Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:40.526702Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:40.526789Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:41.211134Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... T12:22:14.763298Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-10T12:22:14.763330Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1768047734666655 2026-01-10T12:22:14.763364Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-10T12:22:14.763402Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-10T12:22:14.763439Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-10T12:22:14.763496Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-10T12:22:14.763570Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:14.763642Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:14.763693Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:22:14.763739Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:22:14.763785Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:14.763902Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:14.764727Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:14.765501Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:14.765559Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:14.765648Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5558:4770] Owner: [2:5557:4769]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:14.765692Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5558:4770] Owner: [2:5557:4769]. 
Column diff is empty, finishing 2026-01-10T12:22:14.766693Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:14.766753Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:14.768294Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:14.785626Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5567:4777] 2026-01-10T12:22:14.785981Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5567:4777], schemeshard id = 72075186224037897 2026-01-10T12:22:14.786075Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5524:4750], server id = [2:5568:4778], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:14.786242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5568:4778] 2026-01-10T12:22:14.786376Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5568:4778], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:14.848549Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:14.848715Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-10T12:22:14.850182Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5573:4783], server id = [2:5577:4787], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:14.850545Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5573:4783], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:14.850777Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5574:4784], server id = [2:5578:4788], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:14.850839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5574:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:14.851111Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5575:4785], server id = [2:5579:4789], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:14.851164Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5575:4785], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:14.851272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5576:4786], server id = [2:5580:4790], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:14.851316Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5576:4786], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:14.858271Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:14.859459Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5573:4783], server id = [2:5577:4787], tablet id = 72075186224037899 2026-01-10T12:22:14.859520Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:14.860003Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:14.860514Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5574:4784], server id = [2:5578:4788], tablet id = 72075186224037900 2026-01-10T12:22:14.860548Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:14.861342Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:14.861690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5575:4785], server id = [2:5579:4789], tablet id = 72075186224037901 2026-01-10T12:22:14.861721Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:14.861860Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:14.861909Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:14.862162Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:14.862316Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:14.862577Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5593:4799], ActorId: [2:5594:4800], Starting query actor #1 [2:5595:4801] 2026-01-10T12:22:14.862635Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5594:4800], ActorId: [2:5595:4801], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:14.866523Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5576:4786], server id = [2:5580:4790], tablet id = 72075186224037902 2026-01-10T12:22:14.866565Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:14.866963Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5594:4800], ActorId: [2:5595:4801], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MWVjMzg5ODUtYTNhZTlmZTYtODg1OGZhZmQtZWZlYWQ0Mzg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:14.973197Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5604:4810]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:14.973445Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:14.973489Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5604:4810], StatRequests.size() = 1 2026-01-10T12:22:15.094478Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5594:4800], ActorId: [2:5595:4801], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWVjMzg5ODUtYTNhZTlmZTYtODg1OGZhZmQtZWZlYWQ0Mzg=, TxId: 2026-01-10T12:22:15.094558Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5594:4800], ActorId: [2:5595:4801], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWVjMzg5ODUtYTNhZTlmZTYtODg1OGZhZmQtZWZlYWQ0Mzg=, TxId: 2026-01-10T12:22:15.094904Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5593:4799], ActorId: [2:5594:4800], Got response [2:5595:4801] SUCCESS 2026-01-10T12:22:15.095288Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:15.132913Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:15.132973Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:22:15.201350Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5623:4818]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:15.201786Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:15.201869Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:15.202185Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:15.202251Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:15.202334Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:15.206542Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2026-01-10T12:22:12.236895Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707189447609930:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:12.236957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:12.414347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:12.433914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:12.433992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:12.485037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:12.491124Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:12.582156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:12.582204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:12.582215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:12.582354Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:12.610541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:12.779614Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:12.927316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:12.972791Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7593707189447611043:2301], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:54748" } 2026-01-10T12:22:12.972849Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-10T12:22:12.972868Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:12.972883Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:12.973033Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:54748" 2026-01-10T12:22:12.973252Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1768047732973062) 2026-01-10T12:22:13.014611Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1768047732973062 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-10T12:22:13.014904Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-10T12:22:13.018516Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-10T12:22:13.019708Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047732973062&action=1" } } } 2026-01-10T12:22:13.019859Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.019957Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: 
TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:22:13.020183Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:22:13.020658Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-10T12:22:13.020803Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:22:13.023314Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7593707193742578348:2302], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047732973062&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 2026-01-10T12:22:13.023351Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:13.023581Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047732973062&action=1" } } 2026-01-10T12:22:13.025366Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-10T12:22:13.025426Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:13.025499Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7593707193742578344:2197], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:13.025530Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-10T12:22:13.025552Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.025563Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.025603Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-10T12:22:13.025623Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-10T12:22:13.025730Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-10T12:22:13.028556Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:13.028588Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: 
TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.028600Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.028632Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.028703Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-10T12:22:13.028724Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1768047732973062 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-10T12:22:13.030717Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:22:13.042541Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.042633Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-10T12:22:13.042655Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-10T12:22:13.051311Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:54748" 2026-01-10T12:22:13.053221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:22:13.055927Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-10T12:22:13.056018Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710659 2026-01-10T12:22:13.058974Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdoma ... 
HEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-10T12:22:13.673088Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found - using supplied 72075186224037891 2026-01-10T12:22:13.673159Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found - using supplied 72075186224037892 2026-01-10T12:22:13.673243Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037895 2026-01-10T12:22:13.673294Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037889 2026-01-10T12:22:13.673325Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-01-10T12:22:13.674810Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2026-01-10T12:22:13.674832Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-10T12:22:13.674903Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:13.675049Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7593707193742579230:2197], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:13.675084Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-10T12:22:13.675127Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.675156Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.675202Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-10T12:22:13.675243Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1768047733644331 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.675302Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047733644331 issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.675930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-10T12:22:13.676015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-10T12:22:13.676055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response 
from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-10T12:22:13.676147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-10T12:22:13.676195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-10T12:22:13.677305Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-10T12:22:13.680651Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-10T12:22:13.680724Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-10T12:22:13.680763Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.680957Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7593707189447610305:2221], Recipient [1:7593707189447610388:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-10T12:22:13.680984Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-10T12:22:13.681001Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.681011Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.681073Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-10T12:22:13.681108Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1768047733644331 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.683533Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-10T12:22:13.683613Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.683644Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-10T12:22:13.683818Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-10T12:22:13.684429Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2026-01-10T12:22:13.684528Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2026-01-10T12:22:13.687777Z node 
1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2026-01-10T12:22:13.687890Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7593707193742579321:2197], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-10T12:22:13.687942Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-10T12:22:13.687967Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.687976Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.688019Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-10T12:22:13.688046Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-10T12:22:13.689077Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [97d50fd1dab3958c] Result# TEvPutResult {Id# [72075186224037888:1:17:0:0:150:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2026-01-10T12:22:13.689484Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.446 sample PartId# [72075186224037888:1:17:0:0:150:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2026-01-10T12:22:13.690740Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-10T12:22:13.690784Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-10T12:22:13.690793Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.690800Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-10T12:22:13.690869Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1768047733644331 2026-01-10T12:22:13.690898Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1768047733644331 issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.690922Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1768047733644331 issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.690973Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-10T12:22:13.691072Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1768047733644331 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-10T12:22:13.693137Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-10T12:22:13.693187Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-10T12:22:13.702695Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received 
event# 273285131, Sender [1:7593707193742579338:2407], Recipient [1:7593707189447610388:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047733644331&action=2" } UserToken: "" } 2026-01-10T12:22:13.702735Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-10T12:22:13.702964Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1768047733644331&action=2" ready: true status: SUCCESS } } 2026-01-10T12:22:13.707525Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-10T12:22:13.707729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-10T12:22:14.083677Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:15.783412Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.1%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_45_Query |93.1%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardScan::ScanFollowedByUpdate >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardTxOrder::ZigZag_oo |93.1%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true [GOOD] >> VectorIndexBuildTest::CancelBroken >> DataShardOutOfOrder::TestOutOfOrderLockLost >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> DataShardTxOrder::RandomPoints_DelayRS |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> DataShardScan::ScanFollowedByUpdate [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2026-01-10T12:22:18.578522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:18.579605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:18.579655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:18.590419Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:18.590769Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:18.591077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:18.639430Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:18.648937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:18.649650Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:18.651337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:18.651416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:18.651466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:18.651828Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:18.651909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:18.652009Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:18.723748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:18.750527Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:18.751644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:18.751751Z node 1 :TX_DATASHARD 
DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:18.751794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:18.751821Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:18.751849Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.752026Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.752865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.753884Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:18.754007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:18.754076Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.754158Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:18.754914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:18.754953Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:18.754990Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:18.755014Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:18.755052Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:18.755145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.755186Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.755236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:18.762773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:18.762845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:18.762944Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:18.763212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit 
CheckSchemeTx 2026-01-10T12:22:18.763291Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:18.763854Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:18.763929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:18.763959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:18.763991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:18.764018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.764343Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:18.764385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:18.764430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:18.764489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.764592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:18.764639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:18.764679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:18.764722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.764754Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:18.776763Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:18.776832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.776859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.776909Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:18.777770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:18.779916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.779971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.780021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# 
[0:0:0] 2026-01-10T12:22:18.780118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:18.780161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:18.780303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.780345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:18.780380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:18.780411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:18.784299Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:18.784369Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.784591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.784627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.784689Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.784726Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:18.784762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:18.784805Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:18.784839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
on::Execute at 9437185 2026-01-10T12:22:20.647324Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:22:20.647364Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2026-01-10T12:22:20.647409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2026-01-10T12:22:20.647448Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2026-01-10T12:22:20.647490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-10T12:22:20.647526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2026-01-10T12:22:20.647575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2026-01-10T12:22:20.647602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2026-01-10T12:22:20.647797Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is DelayComplete 2026-01-10T12:22:20.647826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2026-01-10T12:22:20.647858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2026-01-10T12:22:20.647892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2026-01-10T12:22:20.647923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-10T12:22:20.647945Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2026-01-10T12:22:20.647973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437185 has finished 2026-01-10T12:22:20.648028Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:20.648095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:22:20.648136Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-10T12:22:20.648167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-10T12:22:20.648331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:459:2401], Recipient [1:459:2401]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:20.648362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:20.648403Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-10T12:22:20.648431Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 
immediate 0 planned 1 2026-01-10T12:22:20.648459Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2026-01-10T12:22:20.648482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2026-01-10T12:22:20.648517Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2026-01-10T12:22:20.648565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-10T12:22:20.648610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2026-01-10T12:22:20.648657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2026-01-10T12:22:20.648687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2026-01-10T12:22:20.648833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is DelayComplete 2026-01-10T12:22:20.648860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2026-01-10T12:22:20.648883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2026-01-10T12:22:20.648907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2026-01-10T12:22:20.648939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-10T12:22:20.648960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2026-01-10T12:22:20.648981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437186 has finished 2026-01-10T12:22:20.649068Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:20.649095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:22:20.649150Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-10T12:22:20.649188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-10T12:22:20.649331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:20.649363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:20.649418Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:20.649449Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:22:20.649475Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for 
ReadTableScan 2026-01-10T12:22:20.649498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2026-01-10T12:22:20.649522Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2026-01-10T12:22:20.649549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-10T12:22:20.649572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2026-01-10T12:22:20.649606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2026-01-10T12:22:20.649637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2026-01-10T12:22:20.649765Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is DelayComplete 2026-01-10T12:22:20.649805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2026-01-10T12:22:20.649831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2026-01-10T12:22:20.649857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2026-01-10T12:22:20.649893Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-10T12:22:20.649913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2026-01-10T12:22:20.649933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437184 has finished 2026-01-10T12:22:20.649955Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:20.649976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:20.650006Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:20.650036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:20.663058Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:22:20.663146Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:22:20.663186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2026-01-10T12:22:20.663261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:20.663319Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-10T12:22:20.664359Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:20.664404Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:20.664429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2026-01-10T12:22:20.664471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:20.664507Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:20.664641Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:20.664681Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:20.664714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2026-01-10T12:22:20.664756Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:20.664786Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder >> VectorIndexBuildTest::CancelBroken [GOOD] >> VectorIndexBuildTest::CreateBuildProposeReject >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2026-01-10T12:19:38.632632Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706526021067921:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:19:38.632713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:19:38.839267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:19:38.848059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:19:38.848154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:19:38.897370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:19:38.920915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706526021067898:2081] 1768047578631910 != 1768047578631913 2026-01-10T12:19:38.935692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:19:39.115357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:19:39.146763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:19:39.213951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:19:39.389926Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-10T12:19:39.394640Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-10T12:19:39.418203Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-10T12:19:39.424804Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-10T12:19:39.641557Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:19:41.768693Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.93, eph 1} end=Done, 2 blobs 208r (max 208), put Spent{time=0.020s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (60349 0 0)b }, ecr=1.000 2026-01-10T12:19:41.853775Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.114, eph 1} end=Done, 2 blobs 753r (max 753), put Spent{time=0.012s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (49423 0 0)b }, ecr=1.000 2026-01-10T12:19:41.905083Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.590, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-10T12:19:41.963744Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.194, eph 2} end=Done, 2 blobs 459r (max 460), put Spent{time=0.011s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (132881 0 0)b }, ecr=1.000 2026-01-10T12:19:42.023012Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.211, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.047s,wait=0.045s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-10T12:19:42.032283Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.212, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.055s,wait=0.051s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-10T12:19:42.033609Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.213, eph 1} end=Done, 2 blobs 503r (max 503), put Spent{time=0.053s,wait=0.037s,interrupts=1} Part{ 1 pk, lobs 0 +0, (32092 0 0)b }, ecr=1.000 2026-01-10T12:19:42.077288Z node 1 :OPS_COMPACT INFO: 
Compact{72057594037968897.2.214, eph 1} end=Done, 2 blobs 1506r (max 1506), put Spent{time=0.012s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103478 0 0)b }, ecr=1.000 2026-01-10T12:19:42.079149Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.733, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.083s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-10T12:19:42.093956Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.215, eph 2} end=Done, 2 blobs 1509r (max 1512), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (98941 0 0)b }, ecr=1.000 2026-01-10T12:19:42.153054Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.301, eph 3} end=Done, 2 blobs 710r (max 711), put Spent{time=0.028s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (205368 0 0)b }, ecr=1.000 2026-01-10T12:19:42.181011Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.324, eph 3} end=Done, 2 blobs 2262r (max 2265), put Spent{time=0.024s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (148264 0 0)b }, ecr=1.000 2026-01-10T12:19:42.250910Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1097, eph 2} end=Done, 2 blobs 3r (max 5), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-10T12:19:42.332057Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.425, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.018s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-10T12:19:42.333568Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.408, eph 4} end=Done, 2 blobs 963r (max 964), put Spent{time=0.040s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (278474 0 0)b }, ecr=1.000 2026-01-10T12:19:42.339146Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.427, eph 2} end=Done, 2 blobs 1005r (max 1005), put Spent{time=0.026s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63918 0 0)b }, ecr=1.000 2026-01-10T12:19:42.340990Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.426, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.027s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-10T12:19:42.365845Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.428, eph 4} end=Done, 2 blobs 3015r (max 3018), put Spent{time=0.053s,wait=0.014s,interrupts=1} Part{ 1 pk, lobs 0 +0, (197587 0 0)b }, ecr=1.000 2026-01-10T12:19:42.428001Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.424, eph 2} end=Done, 2 blobs 3012r (max 3012), put Spent{time=0.114s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206768 0 0)b }, ecr=1.000 2026-01-10T12:19:42.458578Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1241, eph 2} end=Done, 2 blobs 10001r (max 10503), put Spent{time=0.118s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-10T12:19:42.483271Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.521, eph 5} end=Done, 2 blobs 1217r (max 1217), put Spent{time=0.024s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (351822 0 0)b }, ecr=1.000 2026-01-10T12:19:42.498034Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.536, eph 5} end=Done, 2 blobs 3768r (max 3771), put Spent{time=0.021s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (246910 0 0)b }, ecr=1.000 2026-01-10T12:19:42.533653Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1606, eph 3} end=Done, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, 
ecr=1.000 2026-01-10T12:19:42.607841Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.630, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.020s,wait=0.015s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-10T12:19:42.609196Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.631, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.021s,wait=0.016s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-10T12:19:42.621599Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.617, eph 6} end=Done, 2 blobs 1468r (max 1469), put Spent{time=0.049s,wait=0.025s,interrupts=1} Part{ 1 pk, lobs 0 +0, (424354 0 0)b }, ecr=1.000 2026-01-10T12:19:42.643434Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.629, eph 3} end=Done, 2 blobs 4515r (max 4515), put Spent{time=0.056s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (309854 0 0)b }, ecr=1.000 2026-01-10T12:19:42.643817Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.634, eph 6} end=Done, 2 blobs 4521r (max 4524), put Spent{time=0.056s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (296233 0 0)b }, ecr=1.000 2026-01-10T12:19:42.645753Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.632, eph 3} end=Done, 2 blobs 1506r (max 1506), put Spent{time=0.057s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95731 0 0)b }, ecr=1.000 2026-01-10T12:19:42.693493Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1749, eph 3} end=Done, 2 blobs 10001r (max 10503), put Spent{time=0.100s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-10T12:19:42.704293Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.706, eph 7} end=Done, 2 blobs 1719r (max 1720), put Spent{time=0.031s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (496841 0 0)b }, ecr=1.000 2026-01-10T12:19:42.735518Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.719, eph 7} end=Done, 2 blobs 5274r (max 5277), put Spent{time=0.042s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (345556 0 0)b }, ecr=1.000 2026-01-10T12:19:42.737927Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.2114, eph 4} end=Done, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-10T12:19:42.829109Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.796, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.029s,wait=0.028s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-10T12:19:42.842088Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.797, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.042s,wait=0.040s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-10T12:19:42.844527Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.784, eph 8} end=Done, 2 blobs 1970r (max 1971), put Spent{time=0.064s,wait=0.031s,interrupts=1} Part{ 1 pk, lobs 0 +0, (569373 0 0)b }, ecr=1.000 2026-01-10T12:19:42.847327Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.795, eph 4} end=Done, 2 blobs 6018r (max 6018), put Spent{time=0.048s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (413003 0 0)b }, ecr=1.000 2026-01-10T12:19:42.860399Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.798, eph 4} end=Done, 2 blobs 2007r (max 2007), put Spent{ti ... 
WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:22:13.430476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707187915451900 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-10T12:22:13.430533Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.430723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707192210419501 RawX2: 4503608217307433 } TabletId: 72075186224037890 State: 4 2026-01-10T12:22:13.430743Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.430814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707192210419686 RawX2: 4503608217307454 } TabletId: 72075186224037893 State: 4 2026-01-10T12:22:13.430834Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.430917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707192210419683 RawX2: 4503608217307453 } TabletId: 72075186224037895 State: 4 2026-01-10T12:22:13.430945Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.431064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707192210419680 RawX2: 4503608217307452 } TabletId: 72075186224037894 State: 4 2026-01-10T12:22:13.431096Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.431231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707192210419512 RawX2: 4503608217307435 } TabletId: 72075186224037891 State: 4 2026-01-10T12:22:13.431264Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:22:13.431428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 
72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.431586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.431663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.431723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.431790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.431853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:22:13.431871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:22:13.433191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-10T12:22:13.433399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-10T12:22:13.433547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:22:13.433680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-10T12:22:13.433783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-10T12:22:13.433914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-10T12:22:13.434036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-01-10T12:22:13.434139Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-10T12:22:13.434224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-10T12:22:13.434315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-10T12:22:13.434384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:22:13.434490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-10T12:22:13.436161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:22:13.436195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-10T12:22:13.436239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:22:13.437054Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-10T12:22:13.438387Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-10T12:22:13.438420Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-10T12:22:13.438438Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-10T12:22:13.438454Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-01-10T12:22:13.438471Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2026-01-10T12:22:13.438721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-10T12:22:13.438740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-10T12:22:13.438764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:22:13.438771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:22:13.438784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-10T12:22:13.438789Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-10T12:22:13.438799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2026-01-10T12:22:13.438805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-01-10T12:22:13.438816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-10T12:22:13.438821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-10T12:22:13.438922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:22:13.438939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-10T12:22:13.439181Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:42.594686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:42.672272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:42.678217Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:42.678517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:42.678582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:42.979583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:43.077488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:43.077588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.110788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.162554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:43.792400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:43.793303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:43.793340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:43.793370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:43.793488Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:43.856669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:44.387122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:46.793593Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.799829Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:46.801994Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:46.827863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:46.827953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.854995Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:46.856821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.031137Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:47.031213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.032318Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.032798Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.033208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.033897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.034210Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.034397Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.034595Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.034741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.034820Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.049508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.247908Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.277902Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:47.277976Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:47.302601Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:47.303832Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:47.304044Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:47.304106Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:47.304149Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:47.304192Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:47.304237Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:47.304275Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:47.304825Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:47.321805Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.322016Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.345599Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:47.346689Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:47.399149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:47.401692Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:47.414568Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:47.414624Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:47.414696Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:47.422977Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:47.439649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.462699Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:47.462805Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:47.471103Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:47.529733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:47.643409Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:47.874301Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:48.012502Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:48.012575Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:48.689410Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:22:18.766663Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5439:4699], schemeshard count = 1 2026-01-10T12:22:19.946407Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:22:19.946488Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-10T12:22:19.946540Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:19.951261Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:19.982573Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:19.983250Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:19.983342Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:19.984634Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-10T12:22:20.010081Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:20.010406Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:22:20.011467Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5521:4744], server id = [2:5525:4748], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:20.011847Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5521:4744], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:20.012318Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5522:4745], server id = [2:5526:4749], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:20.012379Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5522:4745], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:20.013216Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5523:4746], server id = [2:5528:4751], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:20.013275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5523:4746], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:20.013983Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5524:4747], server id = [2:5527:4750], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:20.014035Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5524:4747], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:20.019280Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:20.019834Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5521:4744], server id = [2:5525:4748], tablet id = 72075186224037899 2026-01-10T12:22:20.019883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:20.021311Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:20.021975Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5522:4745], server id = [2:5526:4749], tablet id = 72075186224037900 2026-01-10T12:22:20.022007Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:20.022711Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:20.023109Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5523:4746], server id = [2:5528:4751], tablet id = 72075186224037901 2026-01-10T12:22:20.023137Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:20.023386Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:20.023433Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:20.023617Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-10T12:22:20.023828Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:20.024199Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5541:4760], ActorId: [2:5542:4761], Starting query actor #1 [2:5543:4762] 2026-01-10T12:22:20.024259Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5542:4761], ActorId: [2:5543:4762], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:20.026617Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5524:4747], server id = [2:5527:4750], tablet id = 72075186224037902 2026-01-10T12:22:20.026650Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:20.027307Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5542:4761], ActorId: [2:5543:4762], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjA1NjIxZWUtYjliMjRjODEtZTQwMDExODQtODVjNDZjNjA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:20.138543Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5552:4771]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:20.138774Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:20.138814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5552:4771], StatRequests.size() = 1 2026-01-10T12:22:20.248833Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5542:4761], ActorId: [2:5543:4762], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjA1NjIxZWUtYjliMjRjODEtZTQwMDExODQtODVjNDZjNjA=, TxId: 2026-01-10T12:22:20.248890Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5542:4761], ActorId: [2:5543:4762], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA1NjIxZWUtYjliMjRjODEtZTQwMDExODQtODVjNDZjNjA=, TxId: 2026-01-10T12:22:20.249206Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5541:4760], ActorId: [2:5542:4761], Got response [2:5543:4762] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:22:20.249569Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:20.250616Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2822:3029];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-10T12:22:20.282547Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:20.282624Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-01-10T12:22:20.344766Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5572:4781];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2026-01-10T12:22:20.354101Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2026-01-10T12:22:20.354463Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-10T12:22:20.354695Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-10T12:22:20.611919Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5683:4874]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:20.612458Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:20.612562Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:20.612964Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:20.613054Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:20.613119Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:20.617627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query >> DataShardTxOrder::ReadWriteReorder [GOOD] |93.1%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardTxOrder::ZigZag >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] |93.1%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> TableCreation::UpdateTableWithAclRollback [GOOD] >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:42.526871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:42.620306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:42.625491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:42.625747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:42.625798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:42.921920Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:43.019151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:43.019250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.052470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.115091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:43.725416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:43.726316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:43.726346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:43.726367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:43.726468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:43.788007Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:44.264250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:46.574689Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.579802Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:46.581631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:46.607209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:46.607369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.635568Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:46.637501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:46.826347Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:46.826456Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.827759Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.828312Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.828923Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.829767Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.830186Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.830404Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.830569Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.830778Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.830878Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.845750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.042521Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.073737Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:47.073805Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:47.097821Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:47.098878Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:47.099022Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:47.099067Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:47.099098Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:47.099139Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:47.099177Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:47.099208Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:47.099671Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:47.115555Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.115636Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.134448Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:47.135191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:47.181517Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:47.183734Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:47.197140Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:47.197187Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:47.197254Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:47.205828Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:47.222264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.240257Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:47.240340Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:47.248201Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:47.422543Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:47.540559Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:47.697770Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:47.838223Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:47.838321Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:48.585304Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... T12:22:20.994638Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-10T12:22:20.994687Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1768047740856448 2026-01-10T12:22:20.994732Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-10T12:22:20.994778Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-10T12:22:20.994821Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-10T12:22:20.994910Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-10T12:22:20.994978Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:20.995080Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:20.995149Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:22:20.995212Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:22:20.995278Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:20.995445Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:20.996713Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:20.997075Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5576:4781] Owner: [2:5575:4780]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:20.997174Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5576:4781] Owner: [2:5575:4780]. 
Column diff is empty, finishing 2026-01-10T12:22:20.997785Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:20.997872Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:20.999366Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:20.999457Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:21.001996Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:21.020973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5585:4788] 2026-01-10T12:22:21.021311Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5585:4788], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:21.021470Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5541:4760], server id = [2:5585:4788], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:21.021675Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5586:4789] 2026-01-10T12:22:21.021818Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5586:4789], schemeshard id = 72075186224037897 2026-01-10T12:22:21.104657Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:21.104883Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-10T12:22:21.105523Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5591:4794], server id = [2:5595:4798], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:21.105630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5591:4794], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:21.105982Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5593:4796], server id = [2:5597:4800], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:21.106038Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5593:4796], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:21.106721Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5592:4795], server id = [2:5596:4799], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:21.106776Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5592:4795], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:21.107327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5594:4797], server id = [2:5598:4801], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:21.107370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5594:4797], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:21.108204Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-01-10T12:22:21.108587Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:21.108895Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5591:4794], server id = [2:5595:4798], tablet id = 72075186224037899 2026-01-10T12:22:21.108940Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:21.109253Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5593:4796], server id = [2:5597:4800], tablet id = 72075186224037901 2026-01-10T12:22:21.109277Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:21.109518Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:21.109691Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:21.109723Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:21.109855Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5592:4795], server id = [2:5596:4799], tablet id = 72075186224037900 2026-01-10T12:22:21.109874Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:21.110012Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:21.110141Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:21.110354Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5603:4806], ActorId: [2:5604:4807], Starting query actor #1 [2:5605:4808] 2026-01-10T12:22:21.110407Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5604:4807], ActorId: [2:5605:4808], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:21.113110Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5594:4797], server id = [2:5598:4801], tablet id = 72075186224037902 2026-01-10T12:22:21.113153Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:21.113781Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5604:4807], ActorId: [2:5605:4808], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTE2MmRlZmMtNDhkNjY5ZTUtN2Y2ZjU0MjAtYjhjNjBmMmM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:21.205748Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5614:4817]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:21.205996Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:21.206047Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5614:4817], StatRequests.size() = 1 2026-01-10T12:22:21.341429Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5604:4807], ActorId: [2:5605:4808], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTE2MmRlZmMtNDhkNjY5ZTUtN2Y2ZjU0MjAtYjhjNjBmMmM=, TxId: 2026-01-10T12:22:21.341502Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5604:4807], ActorId: [2:5605:4808], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTE2MmRlZmMtNDhkNjY5ZTUtN2Y2ZjU0MjAtYjhjNjBmMmM=, TxId: 2026-01-10T12:22:21.341844Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5603:4806], ActorId: [2:5604:4807], Got response [2:5605:4808] SUCCESS 2026-01-10T12:22:21.342152Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:21.356714Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:21.356787Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:22:21.424162Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5633:4825]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:21.424640Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:21.424729Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:21.424983Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:21.425024Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:21.425076Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:21.428332Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2026-01-10T12:22:21.895678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:21.896620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:21.896661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:21.904560Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:21.904903Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:21.905191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:21.937398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:21.946711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:21.947250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:21.948723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:21.948783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:21.948821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:21.949135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:21.949195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:21.949251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:22.025092Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:22.053262Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:22.053448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:22.053557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:22.053642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:22.053689Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:22.053741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:22.054005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.054062Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.054436Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:22.054561Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:22.054635Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:22.054691Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:22.054733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:22.054775Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:22.054841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:22.054881Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:22.054931Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:22.055033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.055069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.055117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:22.062505Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 
2026-01-10T12:22:22.062610Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:22.062730Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:22.062964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:22.063039Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:22.063104Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:22.063165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:22.063210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:22.063252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:22.063326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:22.063689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:22.063743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:22.063793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:22.063836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:22.063907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:22.063946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:22.063988Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:22.064020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:22.064047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:22.075986Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:22.076043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:22.076100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:22.076151Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:22.076210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:22.076702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender 
[1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.076751Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.076797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:22.076871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:22.076898Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:22.077015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:22.077051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:22.077083Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:22.077128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:22.080614Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:22.080673Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:22.080841Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.080870Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.080910Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:22.080945Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:22.080973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:22.081009Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:22.081038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000001: ... 
ions 2026-01-10T12:22:22.991538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-10T12:22:22.991738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.991775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.991820Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:22.991853Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:22.991880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:22.991913Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2026-01-10T12:22:22.991946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2026-01-10T12:22:22.991982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.992006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2026-01-10T12:22:22.992033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2026-01-10T12:22:22.992056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2026-01-10T12:22:22.992821Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2026-01-10T12:22:22.992870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.992897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2026-01-10T12:22:22.992921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-10T12:22:22.992961Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2026-01-10T12:22:22.993000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-10T12:22:22.993046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:22.993151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:22:22.993218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:12] is the new logically complete end at 9437184 2026-01-10T12:22:22.993257Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:12] is the new logically incomplete end at 9437184 2026-01-10T12:22:22.993285Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:12] at 9437184 2026-01-10T12:22:22.993325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:22.993390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2026-01-10T12:22:22.993415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2026-01-10T12:22:22.993462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993485Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2026-01-10T12:22:22.993512Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2026-01-10T12:22:22.993535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2026-01-10T12:22:22.993572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2026-01-10T12:22:22.993630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2026-01-10T12:22:22.993675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2026-01-10T12:22:22.993706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2026-01-10T12:22:22.993763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2026-01-10T12:22:22.993790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2026-01-10T12:22:22.993818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2026-01-10T12:22:22.993863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:22:22.993902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BlockFailPoint 2026-01-10T12:22:22.993927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.993949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:22:22.993968Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:22:22.993994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2026-01-10T12:22:22.994441Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2026-01-10T12:22:22.994500Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:22.994545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.994625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:22:22.994665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2026-01-10T12:22:22.994697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2026-01-10T12:22:22.994847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is DelayComplete 2026-01-10T12:22:22.994875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2026-01-10T12:22:22.994903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2026-01-10T12:22:22.994931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2026-01-10T12:22:22.994970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-10T12:22:22.995001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2026-01-10T12:22:22.995026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:12] at 9437184 has finished 2026-01-10T12:22:23.026281Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:23.026352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:23.026401Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:23.026468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:23.039113Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2026-01-10T12:22:23.039186Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-10T12:22:23.039267Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:23.039303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000005:12] at 9437184 on unit CompleteOperation 2026-01-10T12:22:23.039369Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:23.039413Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:23.040368Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2026-01-10T12:22:23.040407Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2026-01-10T12:22:23.040442Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:22:23.040466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2026-01-10T12:22:23.040528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:22:23.040566Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy >> DataShardOutOfOrder::UncommittedReadSetAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2026-01-10T12:22:18.586028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:18.586095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:18.586786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:18.601921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:18.602351Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:18.602627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:18.652580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:18.662054Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:18.662648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:18.664482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:18.664572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 
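The datashard traces earlier in this log repeatedly walk one operation ([1000005:12]) through a fixed sequence of execution units (PlanQueue, LoadTxDetails, FinalizeDataTxPlan, BuildAndWaitDependencies, BuildDataTxOutRS, StoreAndSendOutRS, PrepareDataTxInRS, LoadAndWaitInRS, BlockFailPoint, ExecuteDataTx, CompleteOperation, CompletedOperations), where most units report Executed and CompleteOperation reports DelayComplete so its completion runs only after the local transaction commits. The standalone C++ sketch below is an illustration of that pattern only: the unit names are copied from the trace, but every type and function in it is invented for the example and none of it is YDB's actual API.

```cpp
// Illustration only (not YDB code): push an operation through a fixed unit
// sequence; Executed advances immediately, DelayComplete defers the unit's
// completion work until the enclosing transaction commits.
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete };

struct TUnit {
    std::string Name;
    EStatus Status;  // status the unit reports when the operation runs on it
};

int main() {
    // Unit order as it appears in the trace for operation [1000005:12].
    const std::vector<TUnit> plan = {
        {"PlanQueue", EStatus::Executed},
        {"LoadTxDetails", EStatus::Executed},
        {"FinalizeDataTxPlan", EStatus::Executed},
        {"BuildAndWaitDependencies", EStatus::Executed},
        {"BuildDataTxOutRS", EStatus::Executed},
        {"StoreAndSendOutRS", EStatus::Executed},
        {"PrepareDataTxInRS", EStatus::Executed},
        {"LoadAndWaitInRS", EStatus::Executed},
        {"BlockFailPoint", EStatus::Executed},
        {"ExecuteDataTx", EStatus::Executed},
        {"CompleteOperation", EStatus::DelayComplete},
        {"CompletedOperations", EStatus::Executed},
    };

    std::vector<std::string> delayed;  // units whose completion is deferred
    for (const TUnit& unit : plan) {
        std::cout << "execute on unit " << unit.Name << "\n";
        if (unit.Status == EStatus::DelayComplete) {
            delayed.push_back(unit.Name);
        }
    }
    std::cout << "execution plan has finished\n";

    // Mirrors the later "Complete execution ... on unit CompleteOperation" lines.
    for (const std::string& name : delayed) {
        std::cout << "complete execution on unit " << name << "\n";
    }
    return 0;
}
```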
2026-01-10T12:22:18.664635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:18.665001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:18.665168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:18.665272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:22:18.740163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:18.773882Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:18.774109Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:18.774216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:22:18.774261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:18.774317Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:18.774357Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.774626Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.774701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.775079Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:18.775198Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:18.775271Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.775329Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:18.775372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:18.775412Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:18.775449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:18.775501Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:18.775546Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:18.775659Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.775706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.775774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# 
[1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:22:18.786393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:18.786485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:18.786597Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:18.786786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:18.786835Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:18.786923Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:18.786986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:18.787026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:18.787066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:18.787102Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.787400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:18.787455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:18.787500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:18.787539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.787590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:18.787641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:18.787679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:18.787717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.787743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:18.799859Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:18.799931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.799965Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.800028Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:18.800133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:18.800631Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.800687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.800746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:22:18.800885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:18.800921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:18.801041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.801083Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:18.801125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:18.801164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:18.809534Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:18.809624Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.809889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.809941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.810007Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.810054Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:18.810096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:18.810140Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:18.810185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
4Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:23.475872Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-10T12:22:23.475939Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-10T12:22:23.476017Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-10T12:22:23.476116Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:23.476246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:22:23.476303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.476380Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-10T12:22:23.476484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:23.476523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.476558Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-10T12:22:23.476706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-10T12:22:23.476751Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.476783Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-10T12:22:23.476868Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:23.476899Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.476939Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:23.477055Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:458:2400], Recipient [1:458:2400]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:23.477132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 
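The TEvReadSet / TEvReadSetAck pairs in the lines above suggest that the sending shard remembers each outgoing read set by (destination tablet, txId, seqno) and drops it only when the matching acknowledgement arrives. A minimal sketch of that bookkeeping follows, assuming such a keyed pending set is all that matters; the container and helper names are invented for illustration and are not the datashard's real data structures.

```cpp
// Minimal sketch, not the datashard's real code: outgoing read sets are kept
// until the destination acknowledges them by (dest tablet, txId, seqno).
#include <cstdint>
#include <iostream>
#include <map>
#include <tuple>

using TRsKey = std::tuple<uint64_t, uint64_t, uint64_t>;  // dest, txId, seqno

int main() {
    std::map<TRsKey, bool> outReadSets;  // presence means "sent but not acked"

    auto Send = [&](uint64_t dest, uint64_t txId, uint64_t seqno) {
        outReadSets[{dest, txId, seqno}] = true;
        std::cout << "send RS txId " << txId << " seqno " << seqno
                  << " to " << dest << "\n";
    };
    auto Ack = [&](uint64_t dest, uint64_t txId, uint64_t seqno) {
        outReadSets.erase(TRsKey{dest, txId, seqno});
        std::cout << "ack  RS txId " << txId << ", still pending: "
                  << outReadSets.size() << "\n";
    };

    // Tablet, txId and seqno values borrowed from the log purely as samples.
    Send(9437186, 137, 45);
    Send(9437186, 140, 46);
    Ack(9437186, 137, 45);
    Ack(9437186, 140, 46);
    return 0;
}
```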
2026-01-10T12:22:23.477198Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-10T12:22:23.477249Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:22:23.477289Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-10T12:22:23.477330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-10T12:22:23.477371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:23.477408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-10T12:22:23.477439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-10T12:22:23.477475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-10T12:22:23.477509Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:23.477554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-10T12:22:23.477590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-10T12:22:23.477619Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-10T12:22:23.478233Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-10T12:22:23.478323Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:23.478387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-10T12:22:23.478418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-10T12:22:23.478449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-10T12:22:23.478478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:23.478693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-10T12:22:23.478753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-10T12:22:23.478789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-10T12:22:23.478831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-10T12:22:23.478876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:23.478902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-10T12:22:23.478952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-10T12:22:23.478983Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:23.479010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:22:23.479075Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-10T12:22:23.479106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-10T12:22:23.479310Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:22:23.479356Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.479398Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-10T12:22:23.479460Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:23.479482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.479504Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-10T12:22:23.479616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:22:23.479653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.479677Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-10T12:22:23.479774Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:23.479808Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.479866Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:22:23.494397Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:23.494481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:23.494542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-10T12:22:23.494621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:23.494696Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:23.495047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:23.495127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.495165Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:49.114097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:49.187770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:49.193723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:49.194003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:49.194056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:49.509512Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:49.589435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:49.589528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:49.623065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:49.702314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:50.302122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:50.303138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:50.303182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:50.303208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:50.303347Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:50.365001Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:50.886110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:53.165249Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:53.170598Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:53.172363Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:53.194105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:53.194206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:53.220723Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:53.222182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:53.389307Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:53.389384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:53.390379Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.390770Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.391224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.391823Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.392029Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.392202Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.392314Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.392465Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.392532Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:53.406828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:53.566855Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:53.594428Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:53.594506Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:53.618754Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:53.619975Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:53.620197Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:53.620252Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:53.620297Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:53.620358Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:53.620411Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:53.620468Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:53.621032Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:53.635669Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:53.635798Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:53.653524Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:53.654338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:53.700233Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:53.702641Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:53.715777Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:53.715833Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:53.715899Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:53.724365Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:53.741165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:53.759189Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:53.759286Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:53.766920Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:53.928043Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:53.945065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:54.203994Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:54.343886Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:54.343956Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:55.026886Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... TU3MjM=, TxId: 2026-01-10T12:22:20.930810Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5422:4691], ActorId: [2:5423:4692], Got response [2:5424:4693] SUCCESS 2026-01-10T12:22:20.931579Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:20.946577Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-10T12:22:20.946650Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-10T12:22:21.074284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:22:21.074372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:22:21.129923Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5435:4704], schemeshard count = 1 2026-01-10T12:22:22.047011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-10T12:22:22.047093Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-10T12:22:22.047143Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:22.052633Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:22.083215Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:22.083849Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:22.083942Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:22.085076Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-10T12:22:22.110049Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:22.110368Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:22:22.111096Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5517:4749], server id = [2:5521:4753], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:22.111440Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5517:4749], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:22.111815Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5518:4750], server id = [2:5522:4754], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:22.111876Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5518:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:22.112592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5519:4751], server id = [2:5523:4755], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:22.112635Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5519:4751], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:22.114057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5520:4752], server id = [2:5525:4757], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:22.114103Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5520:4752], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:22.117987Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:22.118426Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5517:4749], server id = [2:5521:4753], tablet id = 72075186224037899 2026-01-10T12:22:22.118476Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:22.118910Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:22.119119Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5518:4750], server id = [2:5522:4754], tablet id = 72075186224037900 2026-01-10T12:22:22.119141Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:22.120584Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:22.120800Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5519:4751], server id = [2:5523:4755], tablet id = 72075186224037901 2026-01-10T12:22:22.120829Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:22.121147Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:22.121210Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:22.121381Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
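In the traversal above the statistics aggregator fans a TEvStatisticsRequest out to every column shard of the table, crosses tablets off as TEvStatisticsResponse messages come back, and then starts another round that re-asks whatever still needs an answer (here round 3 goes back to tablet 72075186224037900 alone). The sketch below is a simplified model of that round bookkeeping with invented names and a made-up response pattern; it only shows the shape of the loop, not the aggregator's actual logic.

```cpp
// Simplified sketch: each round asks a set of tablets, removes them as they
// answer, and carries the leftovers into the next round's request set.
#include <cstdint>
#include <iostream>
#include <set>

int main() {
    std::set<uint64_t> pending = {72075186224037899ULL, 72075186224037900ULL,
                                  72075186224037901ULL, 72075186224037902ULL};
    int round = 2;

    // Round 2: pretend one tablet never answers.
    for (uint64_t answered : {72075186224037899ULL, 72075186224037901ULL,
                              72075186224037902ULL}) {
        pending.erase(answered);
    }
    std::cout << "round " << round << ": " << pending.size()
              << " tablet(s) still unanswered\n";

    // Round 3: re-request only the leftovers, then finish.
    ++round;
    std::set<uint64_t> retry;
    retry.swap(pending);  // leftovers become the next round's request set
    for (uint64_t tablet : retry) {
        std::cout << "round " << round << ": re-request tablet " << tablet << "\n";
    }
    std::cout << "all tablets answered, send aggregate response\n";
    return 0;
}
```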
2026-01-10T12:22:22.121495Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:22.121881Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5520:4752], server id = [2:5525:4757], tablet id = 72075186224037902 2026-01-10T12:22:22.121906Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:22.122334Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:22.147574Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:22.147879Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-10T12:22:22.148696Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5539:4767], server id = [2:5540:4768], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:22.148799Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5539:4767], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:22.150543Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:22.150652Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:22.150941Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5539:4767], server id = [2:5540:4768], tablet id = 72075186224037900 2026-01-10T12:22:22.150979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:22.151068Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:22.151263Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:22.151733Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5542:4770], ActorId: [2:5543:4771], Starting query actor #1 [2:5544:4772] 2026-01-10T12:22:22.151802Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5543:4771], ActorId: [2:5544:4772], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:22.155029Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5543:4771], ActorId: [2:5544:4772], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTFkMDFhYzQtNTY0ZDYzZmItZjdiZDJkMjEtYzExZDc4MTk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:22.272398Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5553:4781]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:22.272749Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:22.272805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5553:4781], StatRequests.size() = 1 2026-01-10T12:22:22.441190Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5543:4771], ActorId: [2:5544:4772], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTFkMDFhYzQtNTY0ZDYzZmItZjdiZDJkMjEtYzExZDc4MTk=, TxId: 2026-01-10T12:22:22.441271Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5543:4771], ActorId: [2:5544:4772], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTFkMDFhYzQtNTY0ZDYzZmItZjdiZDJkMjEtYzExZDc4MTk=, TxId: 2026-01-10T12:22:22.441655Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5542:4770], ActorId: [2:5543:4771], Got response [2:5544:4772] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:22:22.442156Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5566:4787]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:22.442416Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:22.443063Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:22.443160Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:22.443577Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:22.443637Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:22.443697Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:22.448154Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> DataShardTxOrder::DelayData >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2026-01-10T12:22:18.583768Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:18.585019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:18.585074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:18.596542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:18.596965Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:18.597294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:18.647900Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:18.657608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:18.658252Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:18.659851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:18.659927Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:18.659978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:18.660320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:18.660382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:18.660444Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:18.735025Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:18.757283Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:18.757425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:18.757503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:18.757541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:18.757569Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:18.757594Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.757810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.757867Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.758274Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:18.758420Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:18.758495Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.758543Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:18.758588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:18.758630Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:18.758688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:18.758730Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:18.758785Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:18.758879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.758904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.758966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:18.764630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 
4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:18.764708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:18.764789Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:18.764907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:18.764959Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:18.765007Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:18.765062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:18.765109Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:18.765133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:18.765156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.765387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:18.765423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:18.765456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:18.765482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.765527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:18.765552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:18.765576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:18.765597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.765612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:18.777264Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:18.777315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.777340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.777395Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 
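The trace above records the scheme-transaction propose path at tablet 9437184: the operation is tried on CheckSchemeTx, StoreSchemeTx, FinishPropose and WaitForPlan in turn, and each unit's returned status decides whether the pipeline advances immediately, defers work to the Complete() phase, or parks the operation until the plan step arrives. Below is a minimal sketch of that loop in plain standard C++; only the unit names and statuses are taken from the log, everything else is illustrative and is not the actual NKikimr source.

    #include <iostream>
    #include <string>
    #include <vector>

    // Status values mirroring the log: advance now, finish the unit later in
    // the Complete() phase, or park the operation until an external event.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        EStatus Result;   // what the unit reports for this operation
    };

    int main() {
        // Unit order seen above for tx 1 at tablet 9437184.
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", EStatus::Executed},
            {"StoreSchemeTx", EStatus::DelayComplete},  // completed after the propose tx commits
            {"FinishPropose", EStatus::DelayComplete},
            {"WaitForPlan",   EStatus::NotReady},       // waits for TEvPlanStep
        };
        for (const auto& unit : plan) {
            std::cout << "Trying to execute on unit " << unit.Name << "\n";
            if (unit.Result == EStatus::NotReady) {
                std::cout << "  operation is not ready, waiting for the plan step\n";
                break;  // stays attached to this unit until planned
            }
            std::cout << "  advancing execution plan past " << unit.Name << "\n";
        }
        std::cout << "Propose status: PREPARED\n";
        return 0;
    }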
2026-01-10T12:22:18.777772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:18.780100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.780177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.780244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:18.780373Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:18.780421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:18.780554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.780582Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-10T12:22:18.780609Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:18.780631Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:18.783126Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:18.783179Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.783341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.783377Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.783422Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.783452Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:18.783478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:18.783510Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-10T12:22:18.783534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-10T12:22:18. ... 
latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.588156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:22:23.588181Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:23.588307Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:23.588341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2026-01-10T12:22:23.588381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.588427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:23.588451Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:23.588545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:23.588569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:23.588599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.588662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:23.588695Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:23.588927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-10T12:22:23.588974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589015Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-10T12:22:23.589238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:22:23.589266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589296Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-10T12:22:23.589365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 
9437186 Flags# 0 Seqno# 46} 2026-01-10T12:22:23.589395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589420Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-10T12:22:23.589484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:22:23.589509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589534Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-10T12:22:23.589624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:22:23.589661Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589689Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-10T12:22:23.589774Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:23.589824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589851Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:22:23.589936Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:23.589962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.589986Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-10T12:22:23.590039Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:23.590067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2026-01-10T12:22:23.590110Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.590172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:23.590225Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
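The exchange above is the readset acknowledgement handshake: after CompleteOperation the consumer tablet sends a delayed TEvReadSetAck carrying the step, txid, source, destination and seqno, and the source tablet drops the matching outgoing readset once the ack arrives. The following is a schematic model of that bookkeeping, using only the tablet ids, tx ids and seqnos visible in the log; the real state lives in datashard_outreadset.cpp, so treat this as an illustration rather than that implementation.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <utility>

    struct TAck {
        uint64_t Target;  // consumer tablet
        uint64_t Seqno;   // per-target readset sequence number
        uint64_t TxId;
    };

    int main() {
        // Outgoing readsets from 9437184 to 9437186, keyed by (target, seqno).
        std::map<std::pair<uint64_t, uint64_t>, uint64_t> outReadSets = {
            {{9437186, 48}, 146},
            {{9437186, 49}, 149},
            {{9437186, 50}, 152},
        };
        // Acks arriving back from the consumer, as in the trace above.
        for (TAck ack : {TAck{9437186, 48, 146}, TAck{9437186, 49, 149}, TAck{9437186, 50, 152}}) {
            auto it = outReadSets.find({ack.Target, ack.Seqno});
            if (it != outReadSets.end() && it->second == ack.TxId) {
                std::cout << "Receive RS Ack: txId " << ack.TxId
                          << " seqno " << ack.Seqno << ", forgetting readset\n";
                outReadSets.erase(it);  // nothing left to resend for this readset
            }
        }
        std::cout << "Pending readsets left: " << outReadSets.size() << "\n";
        return 0;
    }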
2026-01-10T12:22:23.590367Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:23.590393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2026-01-10T12:22:23.590438Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.590492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:23.590516Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:23.590629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:23.590651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2026-01-10T12:22:23.590685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.590722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:23.590746Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:23.590861Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:23.590890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2026-01-10T12:22:23.590922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:23.590968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:23.591007Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:23.591177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:23.591209Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.591241Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-10T12:22:23.591375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:23.591410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.591436Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-10T12:22:23.591512Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:23.591541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.591566Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:23.591657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:23.591683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:23.591708Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] Test command err: 2026-01-10T12:22:16.487995Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707204006094917:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:16.488072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:16.686762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:16.686916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:16.693839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:16.821551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:17.106989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:17.107021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:17.107035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:17.107127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:17.301214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:17.494235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:19.260197Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.265805Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:22:19.265847Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:22:19.265863Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.267332Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.267363Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Creating table 2026-01-10T12:22:19.267407Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:22:19.267650Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.267677Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Creating table 2026-01-10T12:22:19.267696Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:19.268204Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.268222Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Creating table 2026-01-10T12:22:19.268244Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:22:19.271730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.274632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.276430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.282031Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:22:19.282041Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:22:19.282086Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Subscribe on create table tx: 281474976710658 2026-01-10T12:22:19.282093Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Subscribe on create table tx: 281474976710659 2026-01-10T12:22:19.282196Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:22:19.282212Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Subscribe on create table tx: 281474976710660 2026-01-10T12:22:19.284955Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Subscribe on tx: 281474976710660 registered 2026-01-10T12:22:19.284975Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:22:19.284984Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. 
Subscribe on tx: 281474976710659 registered 2026-01-10T12:22:19.375008Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:22:19.402510Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:22:19.406331Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593707216890997599:2487] Owner: [1:7593707216890997598:2486]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:22:19.427314Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:22:19.427367Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Column diff is empty, finishing 2026-01-10T12:22:19.429376Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:19.430507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.431611Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.431634Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7593707216890997600:2488] Owner: [1:7593707216890997598:2486]. Successful alter request: ExecComplete 2026-01-10T12:22:19.477875Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-10T12:22:19.477919Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. Column diff is empty, finishing 2026-01-10T12:22:19.477996Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:22:19.478885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.479767Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593707216890997601:2489] Owner: [1:7593707216890997598:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710662 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.479807Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Tab ... DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7593707225480933026:2917], ActorId: [1:7593707225480933027:2918], TraceId: ExecutionId: 517a1367-1e0e647d-ae72f623-6c341174, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-10T12:22:21.863371Z node 1 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7593707225480933025:2916], ActorId: [1:7593707225480933026:2917], TraceId: ExecutionId: 517a1367-1e0e647d-ae72f623-6c341174, RequestDatabase: /dc-1, Got response [1:7593707225480933027:2918] SUCCESS 2026-01-10T12:22:21.863451Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7593707225480933024:2915] ActorId: [1:7593707225480933025:2916] Database: /dc-1 ExecutionId: 517a1367-1e0e647d-ae72f623-6c341174. Extracted script execution operation [1:7593707225480933027:2918], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [1:7593707216890997871:2668], LeaseGeneration: 0 2026-01-10T12:22:21.863477Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7593707225480933024:2915] ActorId: [1:7593707225480933025:2916] Database: /dc-1 ExecutionId: 517a1367-1e0e647d-ae72f623-6c341174. Reply success 2026-01-10T12:22:21.863542Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=M2ZiY2FhMmUtZmFjMmM5NGMtZDU2ODhiZGMtMmUzZGJiYzA=, workerId: [1:7593707225480933029:2494], local sessions count: 0 2026-01-10T12:22:21.880820Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxq2xr2kvwwc58xk3b5j8k", Request has 18444976025967.670823s seconds to be completed 2026-01-10T12:22:21.882285Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxq2xr2kvwwc58xk3b5j8k", Created new session, sessionId: ydb://session/3?node_id=1&id=ZjllOWMzNTItMjU3NDY2ODYtNGIyYzBkMjMtNjY0MGQxNQ==, workerId: [1:7593707225480933062:2509], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:22:21.882397Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxq2xr2kvwwc58xk3b5j8k 2026-01-10T12:22:21.889266Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekxq2y0116sezmg2wktkftt, Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=ZjllOWMzNTItMjU3NDY2ODYtNGIyYzBkMjMtNjY0MGQxNQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. 
TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 38, targetId: [1:7593707225480933062:2509] 2026-01-10T12:22:21.889309Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 38 timeout: 600.000000s actor id: [1:7593707225480933066:2927] 2026-01-10T12:22:21.903782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:21.908342Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxq2y0116sezmg2wktkftt", Forwarded response to sender actor, requestId: 38, sender: [1:7593707225480933064:2510], selfId: [1:7593707204006095141:2266], source: [1:7593707225480933062:2509] --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:22:21.915218Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Describe result: PathErrorUnknown 2026-01-10T12:22:21.915245Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Creating table 2026-01-10T12:22:21.915287Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-10T12:22:21.917515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:21.918352Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710685 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-10T12:22:21.918407Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Subscribe on create table tx: 281474976710685 2026-01-10T12:22:21.920299Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Subscribe on tx: 281474976710685 registered 2026-01-10T12:22:21.943343Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Request: create. Transaction completed: 281474976710685. Doublechecking... 2026-01-10T12:22:21.997958Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:22:21.998032Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7593707225480933076:2934] Owner: [1:7593707225480933075:2933]. 
Column diff is empty, finishing 2026-01-10T12:22:22.010918Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxq31t47w8ncvep41qwhjg", Request has 18444976025967.540735s seconds to be completed 2026-01-10T12:22:22.012790Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxq31t47w8ncvep41qwhjg", Created new session, sessionId: ydb://session/3?node_id=1&id=YjJhNzMyMTctOTJkOWZlODQtNzUyMWI2OGMtZDA1YWQ3YjA=, workerId: [1:7593707229775900501:2519], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:22:22.012961Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxq31t47w8ncvep41qwhjg 2026-01-10T12:22:22.028617Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-10T12:22:22.028685Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-10T12:22:22.028787Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-10T12:22:22.029965Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YjJhNzMyMTctOTJkOWZlODQtNzUyMWI2OGMtZDA1YWQ3YjA=, workerId: [1:7593707229775900501:2519], local sessions count: 1 2026-01-10T12:22:22.030637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:22:22.031549Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710686 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:22.031611Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Subscribe on create table tx: 281474976710686 2026-01-10T12:22:22.031947Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Subscribe on tx: 281474976710686 registered 2026-01-10T12:22:22.039926Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Request: alter. Transaction completed: 281474976710686. Doublechecking... 2026-01-10T12:22:22.100870Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:22.100922Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. 
Column diff is empty, finishing 2026-01-10T12:22:22.101004Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-10T12:22:22.101863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:22.102671Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710687 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:22.102685Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [1:7593707229775900507:3038] Owner: [1:7593707229775900506:3037]. Successful alter request: ExecComplete 2026-01-10T12:22:22.114236Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxq3523c1wdfgqpk8f3jfj", Request has 18444976025967.437400s seconds to be completed 2026-01-10T12:22:22.115582Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxq3523c1wdfgqpk8f3jfj", Created new session, sessionId: ydb://session/3?node_id=1&id=YjIzZjVlMjMtNzMzNzcyMDAtY2I2MmQ5NWMtNTE1YjgyMWE=, workerId: [1:7593707229775900548:2526], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:22:22.115691Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxq3523c1wdfgqpk8f3jfj 2026-01-10T12:22:22.130829Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YjIzZjVlMjMtNzMzNzcyMDAtY2I2MmQ5NWMtNTE1YjgyMWE=, workerId: [1:7593707229775900548:2526], local sessions count: 1 2026-01-10T12:22:22.133744Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=ZjllOWMzNTItMjU3NDY2ODYtNGIyYzBkMjMtNjY0MGQxNQ==, workerId: [1:7593707225480933062:2509], local sessions count: 0 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> DataShardOutOfOrder::UncommittedReads >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.655374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.655426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.668305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.668371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.668480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.674436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.674606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.675118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.675256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.676718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.676841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.677721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-10T12:21:58.677766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.677817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.677852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.677884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.678047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.800090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.800788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.800873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.800920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.800980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:21:58.801409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.801467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1172:3119], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 303, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:24.374689Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710766:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710766 msg type: 269090816 2026-01-10T12:22:24.374772Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710766, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:22:24.374889Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710766, at schemeshard: 72057594046678944 2026-01-10T12:22:24.374914Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 0/1, is published: true 2026-01-10T12:22:24.374941Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710766 at step: 5000047 FAKE_COORDINATOR: advance: minStep5000047 State->FrontStep: 5000046 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710766 at step: 5000047 2026-01-10T12:22:24.375087Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000047, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:24.375157Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710766 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 25769805935 } } 
Step: 5000047 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:24.375206Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 5000047 2026-01-10T12:22:24.375249Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710766:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710766 2026-01-10T12:22:24.376472Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710766:0, at schemeshard: 72057594046678944 2026-01-10T12:22:24.376511Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710766:0 ProgressState 2026-01-10T12:22:24.376572Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-10T12:22:24.376594Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-10T12:22:24.376626Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-10T12:22:24.376647Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-10T12:22:24.376676Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 1/1, is published: true 2026-01-10T12:22:24.376714Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710766 2026-01-10T12:22:24.376751Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-10T12:22:24.376789Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710766:0 2026-01-10T12:22:24.376818Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710766:0 2026-01-10T12:22:24.376866Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:22:24.377929Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2026-01-10T12:22:24.377970Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710766 2026-01-10T12:22:24.378014Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710766 2026-01-10T12:22:24.378122Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, 
IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1172:3119], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 303, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2026-01-10T12:22:24.379327Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-10T12:22:24.379408Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1172:3119], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 303, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:24.379443Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:22:24.380792Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-10T12:22:24.380917Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1172:3119], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, 
DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 303, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:24.380974Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-10T12:22:24.381151Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:22:24.381206Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1173:3120] TestWaitNotification: OK eventTxId 103 2026-01-10T12:22:24.381816Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 103 2026-01-10T12:22:24.382181Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:46.640329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.753556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:46.761823Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:46.762237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:46.762339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:47.099924Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.201616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:47.201755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.236192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.312539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:47.998762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:48.000056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:48.000127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:48.000162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:48.000318Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:48.066169Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:48.587863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:50.808668Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:50.813871Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:50.815648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:50.837661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:50.837750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:50.864740Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:50.866364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:51.036981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:51.037128Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:51.038527Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.038885Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.039289Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.039928Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.040167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.040353Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.040461Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.040604Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.040671Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:51.055399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:51.237507Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:51.267467Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:51.267547Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:51.291231Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:51.292353Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:51.292652Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:51.292725Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:51.292808Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:51.292872Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:51.292926Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:51.292991Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:51.293554Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:51.309042Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:51.309117Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:51.327372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2174:2624] 2026-01-10T12:21:51.328319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2174:2624], schemeshard id = 72075186224037897 2026-01-10T12:21:51.375568Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:21:51.378459Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:51.389822Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Describe result: PathErrorUnknown 2026-01-10T12:21:51.389870Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Creating table 2026-01-10T12:21:51.389930Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:51.401379Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2324:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:51.418486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:51.435667Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:51.435770Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:51.444264Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:51.607231Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:51.619037Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:51.854424Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:51.950658Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:51.950723Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2241:2661] Owner: [2:2240:2660]. Column diff is empty, finishing 2026-01-10T12:21:52.656477Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2026-01-10T12:22:23.487522Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-10T12:22:23.487549Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1768047743394115 2026-01-10T12:22:23.487577Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-10T12:22:23.487602Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-10T12:22:23.487670Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-10T12:22:23.487726Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:23.487811Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:23.487867Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:22:23.487916Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:22:23.487964Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:23.488068Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:23.507812Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:23.508745Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-10T12:22:23.508852Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-10T12:22:23.509051Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5555:4761] Owner: [2:5554:4760]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:23.509156Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5555:4761] Owner: [2:5554:4760]. 
Column diff is empty, finishing 2026-01-10T12:22:23.510773Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-10T12:22:23.510832Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-10T12:22:23.513162Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-10T12:22:23.530140Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5564:4768] 2026-01-10T12:22:23.530301Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5520:4740], server id = [2:5564:4768], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:23.530535Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5564:4768], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:23.530675Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5565:4769] 2026-01-10T12:22:23.530854Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5565:4769], schemeshard id = 72075186224037897 2026-01-10T12:22:23.580167Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-10T12:22:23.580351Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-10T12:22:23.581033Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5570:4774], server id = [2:5574:4778], tablet id = 72075186224037899, status = OK 2026-01-10T12:22:23.581417Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5570:4774], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:23.582737Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5571:4775], server id = [2:5575:4779], tablet id = 72075186224037900, status = OK 2026-01-10T12:22:23.582790Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5571:4775], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:23.582875Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5572:4776], server id = [2:5576:4780], tablet id = 72075186224037901, status = OK 2026-01-10T12:22:23.582907Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5572:4776], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:23.583185Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5573:4777], server id = [2:5577:4781], tablet id = 72075186224037902, status = OK 2026-01-10T12:22:23.583222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5573:4777], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-10T12:22:23.587919Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-10T12:22:23.588419Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5570:4774], server id = [2:5574:4778], tablet id = 72075186224037899 2026-01-10T12:22:23.588457Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:23.588833Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-10T12:22:23.589230Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5571:4775], server id = [2:5575:4779], tablet id = 72075186224037900 2026-01-10T12:22:23.589255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:23.589980Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-10T12:22:23.590335Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5572:4776], server id = [2:5576:4780], tablet id = 72075186224037901 2026-01-10T12:22:23.590356Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:23.590475Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-10T12:22:23.590505Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-10T12:22:23.590661Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-10T12:22:23.590774Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-10T12:22:23.590963Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5590:4790], ActorId: [2:5591:4791], Starting query actor #1 [2:5592:4792] 2026-01-10T12:22:23.591019Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5591:4791], ActorId: [2:5592:4792], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:23.593324Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5573:4777], server id = [2:5577:4781], tablet id = 72075186224037902 2026-01-10T12:22:23.593356Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-10T12:22:23.594041Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5591:4791], ActorId: [2:5592:4792], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDQ5MTc1OWYtYjU3NmEwOGMtNzdhZjA4MDItZmZiMWRhMjU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:23.677402Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5601:4801]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:23.677641Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:23.677697Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5601:4801], StatRequests.size() = 1 2026-01-10T12:22:23.809701Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5591:4791], ActorId: [2:5592:4792], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDQ5MTc1OWYtYjU3NmEwOGMtNzdhZjA4MDItZmZiMWRhMjU=, TxId: 2026-01-10T12:22:23.809782Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5591:4791], ActorId: [2:5592:4792], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDQ5MTc1OWYtYjU3NmEwOGMtNzdhZjA4MDItZmZiMWRhMjU=, TxId: 2026-01-10T12:22:23.810210Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5590:4790], ActorId: [2:5591:4791], Got response [2:5592:4792] SUCCESS 2026-01-10T12:22:23.810576Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:23.847179Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:23.847244Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-10T12:22:23.925961Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5620:4809]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:23.926332Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:23.926402Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:23.926686Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:22:23.926746Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:23.926807Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:23.930644Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> IndexBuildTest::BaseCaseUniq [GOOD] >> IndexBuildTest::CancelBuild |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataShardOutOfOrder::TestReadTableWriteConflict >> TxUsage::WriteToTopic_Demo_16_Query [GOOD] >> TxUsage::WriteToTopic_Demo_45_Query [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:29.792643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:29.905173Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:29.915411Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:29.915811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:29.915916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:30.314899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:30.415240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:30.415366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.449463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.535576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:31.212485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:31.214173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:31.214227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:31.214263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:31.214497Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:31.283546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:31.843612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:34.783459Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:34.783550Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:34.795272Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:34.822935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:34.823024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:34.862297Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:34.864047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:35.028126Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:35.028262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:35.029915Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.030791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.031807Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.032626Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.032776Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.033073Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.033203Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.033368Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.033582Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:35.049467Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:35.267206Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:35.296903Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:35.297013Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:35.340265Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:35.341621Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:35.341862Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:35.341926Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:35.341990Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:35.342046Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:35.342101Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:35.342159Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:35.343026Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:35.357025Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:35.357136Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:35.382148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:21:35.383465Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:35.444989Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:21:35.448348Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:35.468602Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Describe result: PathErrorUnknown 2026-01-10T12:21:35.468671Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Creating table 2026-01-10T12:21:35.468761Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:35.475749Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:35.480271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:35.488569Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:35.488711Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:35.503461Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:35.688543Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:35.716705Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:36.084501Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:36.213978Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:36.214064Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Column diff is empty, finishing 2026-01-10T12:21:36.912513Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... d=NzU5ODI4YTktNDJiN2UxZGYtMzk3MDc5MjktMjdmN2IwNWI=, TxId: 2026-01-10T12:22:14.527750Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4587:3943], ActorId: [2:4589:3944], Got response [2:4590:3945] SUCCESS 2026-01-10T12:22:14.528100Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:14.541897Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:14.541967Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3380:3501] 2026-01-10T12:22:14.597856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:22:14.597965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:22:14.663465Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4420:3813], schemeshard count = 1 2026-01-10T12:22:15.486887Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:22:15.491275Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:15.494827Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-10T12:22:15.494965Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], Start read next stream part 2026-01-10T12:22:15.534262Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4675:3996]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:15.534516Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:22:15.534548Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4675:3996], StatRequests.size() = 1 2026-01-10T12:22:21.275259Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:21.275445Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], Start read next stream part 2026-01-10T12:22:21.275679Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:21.278495Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 281474976720666] shutting down 2026-01-10T12:22:21.278907Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:21.278961Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4658:3986], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGVmYWZlM2EtZDhjYmFlNzMtYjI1YzUzYzAtYWM3MjRjOTQ=, TxId: 2026-01-10T12:22:21.279094Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-10T12:22:21.279193Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], Start read next stream part 2026-01-10T12:22:21.338830Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4767:4068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:21.339113Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-10T12:22:21.339159Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4767:4068], StatRequests.size() = 1 2026-01-10T12:22:24.976926Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4648:2460], ActorId: [2:4749:4053], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:24.977019Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], Start read next stream part 2026-01-10T12:22:24.977482Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33110, txId: 281474976720669] shutting down 2026-01-10T12:22:24.977683Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:24.977725Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4648:2460], ActorId: [2:4749:4053], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWJkNzBkNTYtNDRhZDU0N2ItN2I0NGIzODAtNDYxYmU3MTY=, TxId: 2026-01-10T12:22:24.977913Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4832:4116], ActorId: [2:4834:4118], Starting query actor #1 [2:4835:4119] 2026-01-10T12:22:24.977947Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4834:4118], ActorId: [2:4835:4119], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:24.981770Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4834:4118], ActorId: [2:4835:4119], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWFiMDgwOC02NTRlNDc4MC0xN2I1ZWFlNy0zYzljMDky, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:25.076245Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4834:4118], ActorId: [2:4835:4119], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWFiMDgwOC02NTRlNDc4MC0xN2I1ZWFlNy0zYzljMDky, TxId: 2026-01-10T12:22:25.076316Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4834:4118], ActorId: [2:4835:4119], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWFiMDgwOC02NTRlNDc4MC0xN2I1ZWFlNy0zYzljMDky, TxId: 2026-01-10T12:22:25.076698Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4832:4116], ActorId: [2:4834:4118], Got response [2:4835:4119] SUCCESS 2026-01-10T12:22:25.077018Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:25.091799Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-10T12:22:25.091870Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3380:3501] 2026-01-10T12:22:25.092550Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4866:4140]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:22:25.092954Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-10T12:22:25.093012Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:25.093363Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-10T12:22:25.093425Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 6 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:25.093487Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:25.093762Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:22:25.098891Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-10T12:22:25.100280Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-10T12:22:25.100477Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-10T12:22:25.101192Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [2:4880:4154]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-10T12:22:25.101595Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-10T12:22:25.101657Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:22:25.101956Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-10T12:22:25.102016Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 7 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:22:25.102084Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-10T12:22:25.102346Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-10T12:22:25.104948Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-10T12:22:25.105345Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 2026-01-10T12:22:25.107537Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_46_Table >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table 
[GOOD] >> DataShardTxOrder::RandomPoints_DelayData >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> TableCreation::CreateOldTable >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> DataShardTxOrder::RandomPointsAndRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2026-01-10T12:22:27.635620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:27.636746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:27.636797Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:27.646964Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:27.647303Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:27.647550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:27.687708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:27.696546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:27.697211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:27.698786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:27.698854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:27.698907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:27.699302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:27.699385Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:27.699455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:27.783249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:27.819544Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:27.819707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:27.819818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:27.819869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: 
Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:27.819905Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:27.819938Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:27.820195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.820249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.820529Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:27.820623Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:27.820683Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:27.820730Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:27.820766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:27.820798Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:27.820847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:27.820881Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:27.820919Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:27.821019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.821056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.821123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:27.827830Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:27.827920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:27.828023Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:27.828218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:27.828300Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 
2026-01-10T12:22:27.828366Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:27.828423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:27.828466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:27.828505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:27.828538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:27.828849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:27.828907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:27.828948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:27.828994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:27.829070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:27.829124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:27.829159Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:27.829194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:27.829223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:27.841223Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:27.841285Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:27.841330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:27.841396Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:27.841464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:27.842028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.842090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.842137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:27.842221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: 
{TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:27.842255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:27.842366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:27.842407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:27.842443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:27.842497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:27.846048Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:27.846109Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:27.846294Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.846334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.846390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:27.846429Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:27.846467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:27.846504Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:27.846542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
22:28.831450Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.831593Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:22:28.831619Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:22:28.831638Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:22:28.831657Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.831679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.831711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.831763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-10T12:22:28.831794Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.831902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.831931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.831967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.832031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-10T12:22:28.832060Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.832243Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.832272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.832307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.832345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-10T12:22:28.832378Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.832568Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.832602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:14] at 9437184 on unit FinishPropose 2026-01-10T12:22:28.832646Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose 
latency: 1 ms, status: COMPLETE 2026-01-10T12:22:28.832722Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.832852Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.832894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.832930Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.833012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-10T12:22:28.833040Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.833185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.833211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.833255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.833312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-10T12:22:28.833342Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.833499Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.833525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.833573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:28.833600Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.833751Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:22:28.833783Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.833805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2026-01-10T12:22:28.833837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:28.833874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-10T12:22:28.833955Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2026-01-10T12:22:28.834172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-10T12:22:28.834231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834276Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2026-01-10T12:22:28.834393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-10T12:22:28.834425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834454Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2026-01-10T12:22:28.834528Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-10T12:22:28.834551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834573Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2026-01-10T12:22:28.834623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-10T12:22:28.834649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834670Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2026-01-10T12:22:28.834736Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-10T12:22:28.834763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834785Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2026-01-10T12:22:28.834865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-10T12:22:28.834906Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.834933Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2026-01-10T12:22:28.834993Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-10T12:22:28.835021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.835042Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2026-01-10T12:22:28.835094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-10T12:22:28.835116Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:28.835138Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::PingNotExistedSession >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::NoLocalSessionExecution >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> IndexBuildTest::CancelBuild [GOOD] >> DataShardTxOrder::ZigZag [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> DataShardOutOfOrder::UncommittedReads [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2026-01-10T12:22:23.918653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:23.919823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:23.919875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:23.931533Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:23.931918Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:23.932246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:23.974576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:23.983108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:23.983719Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:23.985314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:23.985395Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:23.985453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:23.985770Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:23.985845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:23.985904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:24.075881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:24.101610Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:24.101810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:24.101932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:24.101988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:24.102049Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:24.102085Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:24.102320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:24.102368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:24.102692Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:24.102794Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:24.102846Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:24.102896Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:24.102933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:24.102987Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:24.103035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:24.103073Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:24.103112Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:24.103206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient 
[1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:24.103240Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:24.103293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:24.110059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:24.110138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:24.110232Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:24.110448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:24.110526Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:24.110589Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:24.110647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:24.110696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:24.110744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:24.110779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:24.111083Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:24.111126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:24.111173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:24.111212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:24.111282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:24.111318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:24.111356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:24.111390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:24.111415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 
2026-01-10T12:22:24.123722Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:24.123802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:24.123839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:24.123899Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:24.123976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:24.124604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:24.124666Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:24.124725Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:24.124816Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:24.124860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:24.125012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:24.125073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:24.125111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:24.125149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:24.129052Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:24.129124Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:24.129347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:24.129393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:24.129450Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:24.129491Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:24.129546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:24.129589Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: 
Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:24.129631Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 6-01-10T12:22:30.503627Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:30.503814Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:239:2231], Recipient [2:239:2231]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.503852Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.503890Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:30.503920Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:30.503952Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:30.503992Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2026-01-10T12:22:30.504018Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2026-01-10T12:22:30.504058Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.504103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2026-01-10T12:22:30.504127Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2026-01-10T12:22:30.504150Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2026-01-10T12:22:30.504834Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2026-01-10T12:22:30.504890Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.504938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2026-01-10T12:22:30.504968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-10T12:22:30.504996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2026-01-10T12:22:30.505035Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505060Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-10T12:22:30.505103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:30.505129Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:22:30.505169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 
2026-01-10T12:22:30.505195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2026-01-10T12:22:30.505237Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2026-01-10T12:22:30.505273Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505317Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:30.505339Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2026-01-10T12:22:30.505361Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2026-01-10T12:22:30.505407Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505428Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2026-01-10T12:22:30.505449Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2026-01-10T12:22:30.505469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2026-01-10T12:22:30.505492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505511Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2026-01-10T12:22:30.505539Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2026-01-10T12:22:30.505568Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2026-01-10T12:22:30.505611Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505636Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2026-01-10T12:22:30.505657Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2026-01-10T12:22:30.505678Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2026-01-10T12:22:30.505700Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2026-01-10T12:22:30.505763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:22:30.505787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2026-01-10T12:22:30.505807Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.505827Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:22:30.505859Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:22:30.505889Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2026-01-10T12:22:30.506222Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2026-01-10T12:22:30.506290Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:30.506341Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.506370Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:22:30.506393Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2026-01-10T12:22:30.506453Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2026-01-10T12:22:30.506627Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2026-01-10T12:22:30.506653Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2026-01-10T12:22:30.506934Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2026-01-10T12:22:30.506967Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2026-01-10T12:22:30.507007Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-10T12:22:30.507031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2026-01-10T12:22:30.507055Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2026-01-10T12:22:30.507081Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:30.507105Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:30.507131Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:30.507158Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:30.520234Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-10T12:22:30.520305Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-10T12:22:30.520366Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:30.520410Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-10T12:22:30.520470Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:30.520516Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:30.520838Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-10T12:22:30.520877Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-10T12:22:30.520911Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:22:30.520951Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-10T12:22:30.520991Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:30.521029Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:22.048195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:22.161963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:22.171337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:22.171745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:22.171846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:22.488993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:22.489140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:22.494016Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047739890602 != 1768047739890605 2026-01-10T12:22:22.502602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:22.544717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:22.642174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:22.925115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:22.939027Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:23.042638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:23.083367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:23.084444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:23.084756Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:23.085020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:23.133727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:23.134596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:23.134723Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:23.136472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:23.136554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:23.136633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:23.137022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:23.137184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:23.137276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:23.148112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:23.178038Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:23.178243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:23.178352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:23.178394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:23.178428Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:23.178466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:23.178710Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:23.178759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:23.179124Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:23.179224Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:23.179343Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:23.179393Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:23.179436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:23.179474Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:23.179511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:23.179546Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:23.179596Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:23.179715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:23.179758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:23.179817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:23.179905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:23.179949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:23.180090Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:23.180362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:23.180414Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:23.180513Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:23.180562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:23.180603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:23.180639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:23.180701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:23.181009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:23.181063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:23.181128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:23.181167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:23.181236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:23.181272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:23.181307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:23.181337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:23.181363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:23.183193Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:23.183251Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:23.193930Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... aiting for: CA [2:1136:2834], trace_id# 2026-01-10T12:22:29.995273Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1136:2834], trace_id# 2026-01-10T12:22:29.995327Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:22:29.996383Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1136:2834] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:22:29.996461Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1136:2834], trace_id# 2026-01-10T12:22:29.996523Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1136:2834], trace_id# 2026-01-10T12:22:29.997491Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1136:2834] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 840 Tasks { TaskId: 1 CpuTimeUs: 369 FinishTimeMs: 1768047749997 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 21 BuildCpuTimeUs: 348 HostName: "ghrun-tjszgfszoq" NodeId: 2 CreateTimeMs: 1768047749995 UpdateTimeMs: 1768047749997 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:29.997568Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1136:2834] 2026-01-10T12:22:29.997668Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1132:2834] trace_id# 2026-01-10T12:22:29.997740Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000840s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:22:29.998193Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [2:1138:2834], Recipient [2:892:2768]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-10T12:22:29.998246Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-10T12:22:29.998369Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [2:892:2768], Recipient [2:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:22:29.998410Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:22:29.998482Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-10T12:22:29.998609Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-10T12:22:29.998690Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 5) 2026-01-10T12:22:29.998746Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 5) table: [72057594046644480:38:1] 2026-01-10T12:22:29.998836Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckWrite 2026-01-10T12:22:29.998893Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:29.998932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:22:29.998968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:29.999022Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:29.999082Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:22:29.999151Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-10T12:22:29.999192Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:29.999221Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:29.999248Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:22:29.999274Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:22:29.999301Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:29.999325Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:22:29.999347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:22:29.999369Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:22:29.999401Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-10T12:22:29.999456Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:22:29.999597Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-10T12:22:29.999658Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:22:29.999738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:29.999773Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:22:29.999818Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit 
FinishProposeWrite 2026-01-10T12:22:29.999858Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:29.999954Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:29.999999Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:22:30.000044Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:30.000109Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:30.000161Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:30.000195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:30.000236Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-10T12:22:30.011125Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:22:30.011227Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:30.011286Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 4 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:22:30.011385Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:30.011945Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1133:2834] TxId: 281474976715662. Ctx: { TraceId: 01kekxqas20tfr5qz9g58v5de5, Database: , SessionId: ydb://session/3?node_id=2&id=MzViMDY0NzctYjdkNzU2MGQtM2ExZGM1YWMtMzMzYmY4N2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:24.129913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:24.207408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:24.214709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:24.215042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:24.215157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:24.539106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:24.539206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:24.544092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047741701693 != 1768047741701696 2026-01-10T12:22:24.554070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:24.599836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:24.685156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:24.969622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:24.972234Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:22:24.972321Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-10T12:22:24.984440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:25.079890Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-10T12:22:25.080018Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-10T12:22:25.080323Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-10T12:22:25.080762Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-10T12:22:25.080827Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-10T12:22:25.082607Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-10T12:22:25.082719Z node 1 :TX_COORDINATOR 
DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-10T12:22:25.082765Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-10T12:22:25.082817Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-10T12:22:25.089205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:25.127066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:25.128250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:25.128586Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-10T12:22:25.128874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:25.166566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:25.167198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:25.167288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:25.168736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:25.168803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:25.168845Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:25.169179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:25.169283Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:25.169365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2765] in generation 1 2026-01-10T12:22:25.169696Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:25.193575Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:25.193800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:25.193930Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2775] 2026-01-10T12:22:25.193972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:25.194014Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:25.194057Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:25.194252Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.194323Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.194659Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:25.194787Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:25.194897Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:25.194953Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:25.195018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:25.195056Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:25.195092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:25.195133Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:25.195189Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:25.195273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.195329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.195380Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2762], serverId# [1:887:2766], sessionId# [0:0:0] 2026-01-10T12:22:25.195482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2766] 2026-01-10T12:22:25.195541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:25.195664Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:25.195911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:25.196025Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:25.196155Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:25.196217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for 
[0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:25.196254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:25.196292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:25.196326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:25.196682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplet ... shard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1592:3263], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-10T12:22:29.836029Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:29.836130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:29.836217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:29.836257Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:29.836300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:29.836336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:29.836386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-10T12:22:29.836438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:29.836465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:29.836492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:29.836517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:29.836643Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:29.836898Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, 
LocalPathId: 38] 2026-01-10T12:22:29.837002Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4500/18446744073709551615 2026-01-10T12:22:29.837056Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1592:3263], 0} after executionsCount# 1 2026-01-10T12:22:29.837101Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1592:3263], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:29.837170Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1592:3263], 0} finished in read 2026-01-10T12:22:29.837232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:29.837261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:29.837287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:29.837318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:29.837366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:29.837388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:29.837425Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T12:22:29.837469Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:29.837581Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:29.838903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1592:3263], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:29.838966Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2026-01-10T12:22:29.958145Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2026-01-10T12:22:29.958225Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 5500 in 0.499900s at 5.450000s 2026-01-10T12:22:29.960489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1614:3280], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-10T12:22:29.960622Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:29.960682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:29.960750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:29.960789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:29.960826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:29.960855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:29.960884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-10T12:22:29.960917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:29.960967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:29.961004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:29.961025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:29.961112Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:29.961331Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:22:29.961372Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-10T12:22:29.961410Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1614:3280], 0} after executionsCount# 1 2026-01-10T12:22:29.961448Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1614:3280], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:29.961519Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1614:3280], 0} finished in read 2026-01-10T12:22:29.961578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:29.961599Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 
72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:29.961617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:29.961636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:29.961663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:29.961679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:29.961697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-10T12:22:29.961738Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:29.961815Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:29.961982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [1:70:2117], Recipient [1:1535:3228]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2026-01-10T12:22:29.962663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1614:3280], Recipient [1:1535:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:29.962706Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-10T12:21:58.561357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.655336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.655392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.666881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.666959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.667081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.673521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.673744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.674488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.674692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.676852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.677007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.678072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.678130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.678208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.678255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.678318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.678553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.834716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.835974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.836092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... tion IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:22:30.390554Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:130:2154] message: TxId: 281474976710760 2026-01-10T12:22:30.390605Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:22:30.390645Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:22:30.390680Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:22:30.390765Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:22:30.393761Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:22:30.393839Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:22:30.393940Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-10T12:22:30.394085Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1575:3436], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:22:30.396477Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-10T12:22:30.396634Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, 
IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1575:3436], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:30.396713Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-10T12:22:30.399188Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-10T12:22:30.399346Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1575:3436], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:22:30.399403Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-10T12:22:30.399579Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:22:30.399641Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:1674:3524] TestWaitNotification: OK eventTxId 102 2026-01-10T12:22:30.402636Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-10T12:22:30.402960Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 
ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-10T12:22:30.406269Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:30.406585Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 340us result status StatusSuccess 2026-01-10T12:22:30.407137Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 
10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:30.409965Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:22:30.410248Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 331us result status StatusPathDoesNotExist 2026-01-10T12:22:30.410457Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:20.337841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:20.441400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:20.451166Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:20.451625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:20.451702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:20.851115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:20.851200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:20.855405Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047738310355 != 1768047738310358 2026-01-10T12:22:20.867585Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:20.912881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:21.017481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:21.357747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:21.372491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:21.480372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:21.530854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:21.532127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:21.532455Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:21.532720Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:21.580353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:21.581081Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:21.581208Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:21.582837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:21.582941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:21.582997Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:21.583357Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:21.583489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:21.583561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:21.594343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:21.631434Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:21.631628Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:21.631729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:21.631766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:21.631820Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:21.631860Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:21.632092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:21.632168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:21.632476Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:21.632590Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:21.632733Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:21.632783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:21.632821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:21.632856Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:21.632895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:21.632929Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:21.632975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:21.633071Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:21.633125Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:21.633191Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:21.633274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:21.633336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:21.633451Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:21.633687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:21.633739Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:21.633853Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:21.633914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:21.633975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:21.634027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:21.634063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:21.634366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:21.634403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:21.634453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:21.634489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:21.634552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:21.634579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:21.634612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:21.634658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:21.634695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:21.636615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:21.636668Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:21.647353Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... -01-10T12:22:30.459113Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1270:3057] 2026-01-10T12:22:30.459162Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1273:3060], CA [2:1274:3061], CA [2:1271:3058], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-10T12:22:30.459195Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1273:3060], CA [2:1274:3061], CA [2:1271:3058], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-10T12:22:30.459323Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1271:3058] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 381 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 241 FinishTimeMs: 1768047750457 InputRows: 1 InputBytes: 6 ComputeCpuTimeUs: 170 BuildCpuTimeUs: 71 WaitOutputTimeUs: 387 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047750457 CreateTimeMs: 1768047750450 UpdateTimeMs: 1768047750457 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.459353Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1271:3058] 2026-01-10T12:22:30.459385Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1265:3038] TxId: 281474976715667. 
Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-10T12:22:30.459412Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], CA [2:1272:3059], trace_id# 2026-01-10T12:22:30.459470Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1277:3057], Recipient [2:970:2824]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:30.459495Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } 2026-01-10T12:22:30.459565Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1272:3059] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 263 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 160 FinishTimeMs: 1768047750457 InputRows: 1 InputBytes: 6 ComputeCpuTimeUs: 124 BuildCpuTimeUs: 36 WaitOutputTimeUs: 483 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047750457 CreateTimeMs: 1768047750450 UpdateTimeMs: 1768047750457 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.459598Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1272:3059] 2026-01-10T12:22:30.459628Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.459661Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1273:3060], CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.459737Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1273:3060] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 400 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 279 FinishTimeMs: 1768047750457 InputRows: 2 InputBytes: 12 ComputeCpuTimeUs: 220 BuildCpuTimeUs: 59 WaitOutputTimeUs: 561 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047750457 CreateTimeMs: 1768047750450 UpdateTimeMs: 1768047750457 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.459758Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1273:3060] 2026-01-10T12:22:30.459797Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.459842Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1274:3061], CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.459938Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1274:3061] TaskId# 6 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 248 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 139 FinishTimeMs: 1768047750457 InputRows: 2 InputBytes: 8 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 59 WaitOutputTimeUs: 579 HostName: "ghrun-tjszgfszoq" NodeId: 2 CreateTimeMs: 1768047750450 UpdateTimeMs: 1768047750458 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.459972Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1274:3061] 2026-01-10T12:22:30.460001Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.460024Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1265:3038] TxId: 281474976715667. 
Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1275:3062], trace_id# 2026-01-10T12:22:30.460199Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1275:3062] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 215 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 124 FinishTimeMs: 1768047750458 InputRows: 2 InputBytes: 8 ComputeCpuTimeUs: 91 BuildCpuTimeUs: 33 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047750458 CreateTimeMs: 1768047750450 UpdateTimeMs: 1768047750458 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.460224Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1275:3062] 2026-01-10T12:22:30.460329Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:30.460406Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1265:3038] TxId: 281474976715667. Ctx: { TraceId: 01kekxqay7dhpd73pdqjgrsmq0, Database: , SessionId: ydb://session/3?node_id=2&id=YjNjNDY2OGYtOTAwNTc0ODUtYWNlNDAzYzQtNDJhYjk3YTQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002731s ReadRows: 2 ReadBytes: 16 RequestUnits# 2 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:28.197162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:28.311708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:28.320135Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:28.320567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:28.320694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:28.596058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:28.596215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:28.600799Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047745978713 != 1768047745978716 2026-01-10T12:22:28.609389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:28.651275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:28.730642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:29.007105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:29.021349Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:29.123298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:29.154644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:29.155387Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:29.155617Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:29.155772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:29.188256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:29.188865Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:29.188962Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:29.190392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:29.190449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:29.190486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:29.190829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:29.190947Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:29.191005Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:29.191301Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:29.218784Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:29.218952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:29.219058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:29.219086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:29.219112Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:29.219151Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:29.219321Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.219361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.219605Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:29.219671Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:29.219741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:29.219783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:29.219816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:29.219855Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:29.219886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:29.219917Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:29.219946Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:29.220025Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.220051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.220095Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:29.220150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:29.220178Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:29.220270Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:29.220475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:29.220540Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:29.220629Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:29.220667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:29.220695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:29.220721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:29.220745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:29.220964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:29.220998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:29.221039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:29.221074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:29.221115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:29.221140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:29.221173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:29.221204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:29.221221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:29.222195Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:22:29.222270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:29.222305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:29.222337Z node 1 :TX_DATASHARD T ... 12:22:30.529497Z node 1 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2026-01-10T12:22:30.529558Z node 1 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:38:1] 2026-01-10T12:22:30.529656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-10T12:22:30.529730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:22:30.529784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:22:30.529844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:30.529884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:30.529930Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-10T12:22:30.529991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-10T12:22:30.530031Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:22:30.530063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:30.530091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:22:30.530113Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:22:30.530136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:22:30.530164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:22:30.530193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:22:30.530217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:22:30.530251Z 
node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-10T12:22:30.530321Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-10T12:22:30.530460Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037888, row count=1 2026-01-10T12:22:30.530517Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:22:30.530592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:30.530625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:22:30.530667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:22:30.530698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:30.530759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-10T12:22:30.530808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:22:30.530847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:30.530890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:30.530932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:22:30.530957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:30.530986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished ... 
blocked commit for tablet 72075186224037888 2026-01-10T12:22:30.682137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1168:2957], Recipient [1:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:22:30.682318Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:30.682377Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-10T12:22:30.682427Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2026-01-10T12:22:30.682516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:30.682627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:30.682667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:30.682706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:30.682761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:30.682817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:22:30.682855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:30.682886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:30.682918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:30.682950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:30.683102Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:22:30.683370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayComplete 2026-01-10T12:22:30.683416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:30.683457Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:30.683494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit CompletedOperations 2026-01-10T12:22:30.683558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:30.683587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:30.683615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:22:30.683675Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:30.758006Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 4000 2026-01-10T12:22:30.758114Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2026-01-10T12:22:30.898840Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:22:30.898922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:30.898969Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2026-01-10T12:22:30.899091Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:30.899210Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:30.899253Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:30.899297Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1168:2957], 0} after executionsCount# 1 2026-01-10T12:22:30.899332Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1168:2957], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:30.899471Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1168:2957], 0} finished in read 2026-01-10T12:22:30.901780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1168:2957], Recipient [1:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:30.901830Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 
... waiting for SysViewsRoster update finished 2026-01-10T12:22:21.681039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:21.756950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:21.763154Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:21.763462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:21.763526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:22.039203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:22.039331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:22.043845Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047739343690 != 1768047739343693 2026-01-10T12:22:22.051803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:22.092599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:22.171480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:22.450950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:22.464871Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:22.566463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:22.599661Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:22.600435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:22.600693Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:22.600893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:22.639151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:22.639943Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:22.640059Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:22.641806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:22.641900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:22.641968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:22.642349Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:22.642466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:22.642524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:22.653244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:22.683497Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:22.683662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:22.683739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:22.683774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:22.683809Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:22.683839Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:22.684039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.684122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:22.684390Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:22.684471Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:22.684602Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:22.684649Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:22.684680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:22.684706Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:22.684731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:22.684766Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:22.684816Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:22.684914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.684952Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:22.684995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:22.685059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:22.685131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:22.685220Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:22.685415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:22.685462Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:22.685542Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:22.685593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:22.685626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:22.685666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:22.685692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:22.685954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:22.685993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:22.686032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:22.686063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:22.686103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:22.686126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:22.686149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:22.686180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:22.686204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:22.687645Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:22.687681Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:22.698376Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... jMtNDNmMTRhM2ItNGUyYTkyMWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-01-10T12:22:30.899886Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:30.901896Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:30.902236Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1089:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1086:2904];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:30.902326Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1089:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:30.902684Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1081:2878] TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1086:2904] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 221605 Tasks { TaskId: 1 CpuTimeUs: 219012 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 11 BuildCpuTimeUs: 219001 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 2 CreateTimeMs: 1768047750076 CurrentWaitInputTimeUs: 139061 UpdateTimeMs: 1768047750899 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:30.902765Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1086:2904] 2026-01-10T12:22:30.903457Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1205} ActorId: [2:1081:2878] TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-10T12:22:30.903557Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1198} ActorId: [2:1081:2878] TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1087:2905] TaskId# 2 trace_id# 2026-01-10T12:22:30.903646Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1198} ActorId: [2:1081:2878] TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1088:2906] TaskId# 3 trace_id# 2026-01-10T12:22:30.903766Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1081:2878] TxId: 281474976715662. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:30.903880Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 0.550000s 2026-01-10T12:22:30.904232Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, ActorId: [2:1055:2878], ActorState: ExecuteState, LegacyTraceId: 01kekxqadj997e3r187t2asvjg, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-10T12:22:30.904648Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:22:30.904705Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-10T12:22:30.904789Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1087:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxqadj997e3r187t2asvjg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-10T12:22:30.904866Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1087:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxqadj997e3r187t2asvjg. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1081:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-10T12:22:30.905036Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:30.906408Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:30.906538Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1088:2906], TxId: 281474976715662, task: 3. Ctx: { TraceId : 01kekxqadj997e3r187t2asvjg. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-10T12:22:30.906614Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1088:2906], TxId: 281474976715662, task: 3. Ctx: { TraceId : 01kekxqadj997e3r187t2asvjg. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1081:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-10T12:22:30.906716Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:30.909664Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:30.910113Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-10T12:22:30.910214Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-10T12:22:30.910258Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-10T12:22:30.910362Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1122:2938] SnapshotReq marker# P0 2026-01-10T12:22:30.910852Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1124:2938] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-10T12:22:30.911087Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1124:2938] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-10T12:22:30.925908Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1122:2938] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-10T12:22:30.926183Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-10T12:22:30.926360Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1122:2938], Recipient [2:887:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-10T12:22:30.926645Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:22:30.926724Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:1121:2878] TxId: 281474976715664. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:22:30.926854Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1121:2878] TxId: 281474976715664. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:30.926919Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1121:2878] TxId: 281474976715664. Ctx: { TraceId: 01kekxqadj997e3r187t2asvjg, Database: , SessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:22:30.927180Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1055:2878] 2026-01-10T12:22:30.927986Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=MmY4YTJkM2YtNWUzYmU2YjMtNDNmMTRhM2ItNGUyYTkyMWQ=, workerId: [2:1055:2878], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 255 >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:20.663737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:20.755323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:20.764449Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:20.764989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:20.765084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:21.080400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:21.080542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:21.085164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047738283057 != 1768047738283060 2026-01-10T12:22:21.097259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:21.139874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:21.224648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:21.509188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:21.523658Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:21.627266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:21.896931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:22.110987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:22.111118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:22.111193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:22.112347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:22.112749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:22.117449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:22.264461Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:22.341370Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... performing the first select { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets ... captured readset ... captured readset ... performing an upsert ... performing the second select ... performing the third select ... performing the last upsert and commit 2026-01-10T12:22:24.734620Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MjQ4ZDM4MWUtNWViMDA2NTEtNjM1YTg1N2UtYmY1MDFjMDk=, ActorId: [1:1165:2960], ActorState: ExecuteState, LegacyTraceId: 01kekxq5nfdjzeb37g78rypxf3, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-10T12:22:28.029701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:28.039415Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:28.039745Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:28.039815Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:28.264602Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:28.264725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:28.287278Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047745320403 != 1768047745320407 2026-01-10T12:22:28.290785Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:28.332848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:28.412715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:28.674126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:28.687172Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:28.788483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:28.808897Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:28.809698Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:28.809892Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-10T12:22:28.810057Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:28.817944Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:28.845708Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:28.845838Z node 2 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:28.847309Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:28.847396Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:28.847458Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:28.847785Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:28.847911Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:28.847992Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-10T12:22:28.858789Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestor ... # 4001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-10T12:22:30.250521Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:30.250565Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2026-01-10T12:22:30.986369Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1271:3042], Recipient [2:970:2824]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-01-10T12:22:30.986675Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1270:3041], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-01-10T12:22:30.986824Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:30.986905Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:30.987000Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:22:30.987047Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:30.987093Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:30.987130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:30.987182Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated 
operation [0:5] at 72075186224037888 2026-01-10T12:22:30.987229Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:22:30.987255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:30.987278Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:30.987299Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:30.987411Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:30.987679Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:22:30.987760Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-10T12:22:30.987809Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1270:3041], 0} after executionsCount# 1 2026-01-10T12:22:30.987858Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1270:3041], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:30.987949Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1270:3041], 0} finished in read 2026-01-10T12:22:30.988034Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:22:30.988065Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:30.988109Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:30.988138Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:30.988183Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-10T12:22:30.988203Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:30.988226Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-10T12:22:30.988268Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:30.988374Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 
2026-01-10T12:22:30.988462Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-10T12:22:30.988504Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-10T12:22:30.988550Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:22:30.988571Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-10T12:22:30.988592Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:30.988615Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-10T12:22:30.988651Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-10T12:22:30.988680Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:22:30.988703Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:30.988723Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-10T12:22:30.988745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-10T12:22:30.988841Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:30.989031Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:22:30.989071Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-10T12:22:30.989104Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1271:3042], 0} after executionsCount# 1 2026-01-10T12:22:30.989138Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1271:3042], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:30.989188Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1271:3042], 0} finished in read 2026-01-10T12:22:30.989235Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:22:30.989259Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-10T12:22:30.989282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add 
[0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-10T12:22:30.989305Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-10T12:22:30.989338Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-10T12:22:30.989360Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-10T12:22:30.989384Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-10T12:22:30.989408Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-10T12:22:30.989466Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-10T12:22:30.989627Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-10T12:22:30.989812Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:970:2824]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-10T12:22:30.991704Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1270:3041], Recipient [2:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:30.991767Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:22:30.991936Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1271:3042], Recipient [2:970:2824]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:30.991966Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2026-01-10T12:22:27.410441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:27.411453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:27.411497Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:27.423625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:27.423996Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:27.424282Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 
2026-01-10T12:22:27.465397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:27.474413Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:27.475021Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:27.476692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:27.476762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:27.476810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:27.477158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:27.477238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:27.477327Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:27.550179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:27.579142Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:27.579329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:27.579428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:27.579483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:27.579525Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:27.579556Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:27.579805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.579854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.580255Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:27.580367Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:27.580434Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:27.580486Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:27.580519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:27.580552Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:27.580600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:27.580633Z node 1 :TX_DATASHARD INFO: 
datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:27.580673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:27.580764Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.580798Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.580862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:27.590059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:27.590142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:27.590243Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:27.590419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:27.590476Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:27.590539Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:27.590596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:27.590629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:27.590663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:27.590694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:27.590974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:27.591022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:27.591057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:27.591087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:27.591151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:27.591188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:27.591219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 
9437184 to execution unit WaitForPlan 2026-01-10T12:22:27.591255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:27.591279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:27.603309Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:27.603375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:27.603421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:27.603481Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:27.603548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:27.604094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.604161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:27.604206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:27.604278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:27.604316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:27.604428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:27.604461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:27.604493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:27.604525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:27.612167Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:27.612252Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:27.612447Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.612491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:27.612540Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:27.612579Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:27.612612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:27.612655Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:27.612686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 12:22:32.368214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2026-01-10T12:22:32.368267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2026-01-10T12:22:32.368315Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:32.368337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-10T12:22:32.368379Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:22:32.368428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:32.368454Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:32.368562Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:32.368586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-10T12:22:32.368621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:22:32.368665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:32.368689Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:32.368921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:32.368964Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:32.368995Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-10T12:22:32.369212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:32.369245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:32.369286Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-10T12:22:32.369342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:238:2230], Recipient [1:458:2400]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-10T12:22:32.369370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:32.369400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-10T12:22:32.369462Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-10T12:22:32.369504Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-10T12:22:32.369566Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:32.369700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:32.369926Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:32.369954Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:32.370039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:458:2400], Recipient [1:458:2400]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:32.370085Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:32.370140Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-10T12:22:32.370176Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:22:32.370209Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-10T12:22:32.370242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-10T12:22:32.370276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:32.370304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-10T12:22:32.370334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-10T12:22:32.370372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-10T12:22:32.370404Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:32.370428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-10T12:22:32.370450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-10T12:22:32.370472Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-10T12:22:32.370946Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-10T12:22:32.370996Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:32.371042Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-10T12:22:32.371068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-10T12:22:32.371094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-10T12:22:32.371132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:32.371347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-10T12:22:32.371374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-10T12:22:32.371401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-10T12:22:32.371432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-10T12:22:32.371461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:32.371484Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-10T12:22:32.371507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-10T12:22:32.371554Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:32.371597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:22:32.371626Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-10T12:22:32.371668Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-10T12:22:32.371879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 
txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:32.371913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:32.371941Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-10T12:22:32.385954Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:32.386008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:32.386086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:32.386169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:32.386224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:32.386476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:32.386529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:32.386575Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> TableCreation::CreateOldTable [GOOD] >> TxUsage::WriteToTopic_Demo_27_Query >> DataShardTxOrder::DelayData [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2026-01-10T12:22:25.304016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:25.305171Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:25.305224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:25.316296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:25.316680Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:25.317000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:25.367173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:25.377170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:25.377840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:25.379652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:25.379734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:25.379786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:25.380240Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:25.380325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:25.380409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:25.469320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:25.501196Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:25.501358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:25.501434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:25.501478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:25.501505Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:25.501533Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:25.501724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.501764Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.502022Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:25.502117Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:25.502177Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 
2026-01-10T12:22:25.502227Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:25.502263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:25.502289Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:25.502332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:25.502358Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:25.502388Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:25.502454Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.502478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.502537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:25.508726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:25.508799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:25.508911Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:25.509120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:25.509188Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:25.509253Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:25.509307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:25.509350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:25.509383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:25.509416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:25.509718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:25.509758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:25.509816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: 
Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:25.509850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:25.509925Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:25.509956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:25.509991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:25.510023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:25.510046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:25.522022Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:25.522081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:25.522113Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:25.522168Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:25.522245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:25.522687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.522732Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:25.522777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:25.522868Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:25.522924Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:25.523049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:25.523088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:25.523128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:25.523164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:25.526167Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:25.526225Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:25.526389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.526415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:25.526456Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:25.526504Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:25.526527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:25.526557Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:25.526583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... plan for [1000005:506] at 9437184 has finished 2026-01-10T12:22:35.984676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:35.984701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:35.984727Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2026-01-10T12:22:35.984972Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.985003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.985035Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.985056Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:35.985077Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2026-01-10T12:22:35.985103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2026-01-10T12:22:35.985133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.985155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2026-01-10T12:22:35.985182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2026-01-10T12:22:35.985198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2026-01-10T12:22:35.985600Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2026-01-10T12:22:35.985630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.985646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan 
for [1000005:507] at 9437184 executing on unit LoadTxDetails 2026-01-10T12:22:35.985661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-10T12:22:35.985680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2026-01-10T12:22:35.985714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.985729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-10T12:22:35.985741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:35.985753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:22:35.985781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:507] is the new logically complete end at 9437184 2026-01-10T12:22:35.985809Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2026-01-10T12:22:35.985834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2026-01-10T12:22:35.985868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.985886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:35.985899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2026-01-10T12:22:35.985918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2026-01-10T12:22:35.985947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.985960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2026-01-10T12:22:35.985973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2026-01-10T12:22:35.985985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2026-01-10T12:22:35.985999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.986011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2026-01-10T12:22:35.986035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2026-01-10T12:22:35.986050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2026-01-10T12:22:35.986065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.986077Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan 
for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2026-01-10T12:22:35.986097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2026-01-10T12:22:35.986117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2026-01-10T12:22:35.986130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.986143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2026-01-10T12:22:35.986160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:22:35.986176Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2026-01-10T12:22:35.986190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.986200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:22:35.986212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:22:35.986236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2026-01-10T12:22:35.986453Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2026-01-10T12:22:35.986486Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:35.986517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:35.986539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:22:35.986577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2026-01-10T12:22:35.986599Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2026-01-10T12:22:35.986685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is DelayComplete 2026-01-10T12:22:35.986701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2026-01-10T12:22:35.986719Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2026-01-10T12:22:35.986735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2026-01-10T12:22:35.986751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-10T12:22:35.986799Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2026-01-10T12:22:35.986825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:507] at 9437184 has finished 2026-01-10T12:22:35.986868Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:35.986890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:35.986910Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:35.986939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:36.001045Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2026-01-10T12:22:36.001125Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-10T12:22:36.001206Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:36.001252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2026-01-10T12:22:36.001313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:36.001362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:36.001535Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:36.001585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2026-01-10T12:22:36.001620Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:36.001666Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2026-01-10T12:22:16.487988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707204673417202:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:16.488145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:16.691724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:16.691811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:22:16.753472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:16.801787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:17.106980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:17.107010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:17.107073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:17.107157Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:17.300203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:17.496268Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:19.350799Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.353770Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:22:19.353843Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:22:19.353864Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.354776Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.354810Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Creating table 2026-01-10T12:22:19.354876Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:19.355183Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.355205Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Creating table 2026-01-10T12:22:19.355233Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:22:19.355336Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. Describe result: PathErrorUnknown 2026-01-10T12:22:19.355340Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. 
SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. Creating table 2026-01-10T12:22:19.355350Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:22:19.359545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.361218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.363426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.379601Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:22:19.379598Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:22:19.379641Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Subscribe on create table tx: 281474976710659 2026-01-10T12:22:19.379641Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Subscribe on create table tx: 281474976710658 2026-01-10T12:22:19.379725Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:22:19.379758Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. Subscribe on create table tx: 281474976710660 2026-01-10T12:22:19.381994Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:22:19.382022Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. 
Subscribe on tx: 281474976710660 registered 2026-01-10T12:22:19.382030Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Subscribe on tx: 281474976710659 registered 2026-01-10T12:22:19.472618Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:22:19.499663Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593707217558319887:2488] Owner: [1:7593707217558319886:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:22:19.503643Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:22:19.525571Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-10T12:22:19.525660Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Column diff is empty, finishing 2026-01-10T12:22:19.526469Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:22:19.527572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.528508Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.528545Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7593707217558319889:2490] Owner: [1:7593707217558319886:2487]. Successful alter request: ExecComplete 2026-01-10T12:22:19.574709Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:22:19.574748Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. Column diff is empty, finishing 2026-01-10T12:22:19.574803Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:19.575789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.576943Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707217558319888:2489] Owner: [1:7593707217558319886:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710662 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.576973Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_ex ... 0], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 1, run create session 2026-01-10T12:22:35.118535Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976025954.433104s seconds to be completed 2026-01-10T12:22:35.120374Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, workerId: [3:7593707286402008109:2507], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:22:35.120514Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:22:35.120780Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593707286402008106:2929], ActorId: [3:7593707286402008107:2930], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph, retry_state, user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-10T12:22:35.121068Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 38, targetId: [3:7593707286402008109:2507] 2026-01-10T12:22:35.121096Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 38 timeout: 300.000000s actor id: [3:7593707286402008111:2931] 2026-01-10T12:22:35.127326Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 38, sender: [3:7593707286402008110:2508], selfId: [3:7593707256337235594:2266], source: [3:7593707286402008109:2507] 2026-01-10T12:22:35.127683Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593707286402008106:2929], ActorId: [3:7593707286402008107:2930], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, TxId: 2026-01-10T12:22:35.128348Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593707286402008106:2929], ActorId: [3:7593707286402008107:2930], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, TxId: 2026-01-10T12:22:35.128384Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593707286402008106:2929], ActorId: [3:7593707286402008107:2930], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-10T12:22:35.128468Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7593707286402008105:2928], ActorId: [3:7593707286402008106:2929], TraceId: ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d, RequestDatabase: /dc-1, Got response [3:7593707286402008107:2930] SUCCESS 2026-01-10T12:22:35.128525Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593707286402008104:2927] ActorId: [3:7593707286402008105:2928] Database: /dc-1 ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d. Extracted script execution operation [3:7593707286402008107:2930], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7593707269222138322:2665], LeaseGeneration: 0 2026-01-10T12:22:35.128548Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7593707286402008104:2927] ActorId: [3:7593707286402008105:2928] Database: /dc-1 ExecutionId: e29f4c18-9a96d750-a9d00964-6013665d. 
Reply success 2026-01-10T12:22:35.128664Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZDliNWYwZGQtMTAwMzBiOGMtYWVjMThlNGYtZTRmYTE4Yzg=, workerId: [3:7593707286402008109:2507], local sessions count: 0 2026-01-10T12:22:35.144546Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxqfw80xe0b8wb64x7k2xv", Request has 18444976025954.407101s seconds to be completed 2026-01-10T12:22:35.147114Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxqfw80xe0b8wb64x7k2xv", Created new session, sessionId: ydb://session/3?node_id=3&id=MzFiZDdiZTEtM2YxNjZjZjAtMjNjY2E0LWJlMmZlMzQy, workerId: [3:7593707286402008140:2521], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:22:35.147366Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxqfw80xe0b8wb64x7k2xv 2026-01-10T12:22:35.155582Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekxqfwkakve65enhg3evsba, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MzFiZDdiZTEtM2YxNjZjZjAtMjNjY2E0LWJlMmZlMzQy, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7593707286402008140:2521] 2026-01-10T12:22:35.155632Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 600.000000s actor id: [3:7593707286402008144:2940] 2026-01-10T12:22:35.170735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:35.175571Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxqfwkakve65enhg3evsba", Forwarded response to sender actor, requestId: 40, sender: [3:7593707286402008142:2522], selfId: [3:7593707256337235594:2266], source: [3:7593707286402008140:2521] --------------------------- INIT FINISHED --------------------------- 2026-01-10T12:22:35.184048Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Describe result: PathErrorUnknown 2026-01-10T12:22:35.184090Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Creating table 2026-01-10T12:22:35.184145Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-10T12:22:35.189569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:35.190483Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710686 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-10T12:22:35.190521Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Subscribe on create table tx: 281474976710686 2026-01-10T12:22:35.192691Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Subscribe on tx: 281474976710686 registered 2026-01-10T12:22:35.221075Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Request: create. Transaction completed: 281474976710686. Doublechecking... 2026-01-10T12:22:35.315070Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:35.315122Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707286402008154:2947] Owner: [3:7593707286402008153:2946]. Column diff is empty, finishing 2026-01-10T12:22:35.315564Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707286402008280:3047] Owner: [3:7593707286402008279:3046]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:35.315596Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707286402008280:3047] Owner: [3:7593707286402008279:3046]. Column diff is empty, finishing 2026-01-10T12:22:35.328323Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxqg1z50ekz93p39rg6jz3", Request has 18444976025954.223326s seconds to be completed 2026-01-10T12:22:35.331002Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxqg1z50ekz93p39rg6jz3", Created new session, sessionId: ydb://session/3?node_id=3&id=NGUxOTAwZGQtY2JlZDA4MTMtMTU0MzVkYzQtN2RiNWNhMzg=, workerId: [3:7593707286402008286:2531], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:22:35.331245Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxqg1z50ekz93p39rg6jz3 2026-01-10T12:22:35.347612Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NGUxOTAwZGQtY2JlZDA4MTMtMTU0MzVkYzQtN2RiNWNhMzg=, workerId: [3:7593707286402008286:2531], local sessions count: 1 2026-01-10T12:22:35.352310Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MzFiZDdiZTEtM2YxNjZjZjAtMjNjY2E0LWJlMmZlMzQy, workerId: [3:7593707286402008140:2521], local sessions count: 0 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::LocksBrokenStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:22:30.019458Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:30.019703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:30.101446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:30.102553Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:30.108807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:30.109174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:30.109411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:30.111015Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:30.111228Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:30.111352Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:30.432008Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:30.538458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:30.538546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:30.539087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:30.539133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:30.583266Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:22:30.584100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:30.584576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:30.673308Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:30.733220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:31.338806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:31.406328Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:31.406471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:31.729465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:31.803077Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:31.807413Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:31.807743Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1615:2397] 2026-01-10T12:22:31.807980Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:31.857016Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:31.861891Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:31.862050Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:31.863587Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:31.863664Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:31.863733Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:31.864147Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:31.864424Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:31.864506Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1639:2397] in generation 1 2026-01-10T12:22:31.867500Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:31.912166Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:31.912316Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:31.912395Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1643:2414] 2026-01-10T12:22:31.912434Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:31.912466Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:31.912492Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:31.912716Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1615:2397], Recipient [2:1615:2397]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:31.912749Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:31.913023Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:31.913093Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:31.913238Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:31.913277Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:31.913308Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:31.913334Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:31.913357Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:31.913381Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:31.913411Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:31.913487Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1612:2395], Recipient [2:1615:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:31.913517Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:31.913547Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1601:3051], serverId# [2:1612:2395], sessionId# [0:0:0] 2026-01-10T12:22:31.914156Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:822:2464], Recipient [2:1612:2395] 2026-01-10T12:22:31.914203Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:31.914283Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:31.914479Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:31.914525Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:31.914591Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:31.914631Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:31.914676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:31.914710Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:31.914739Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:31.914965Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution ... ecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3502 UnfrozenTablets: 72075186224037888 2026-01-10T12:22:35.783180Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:35.783219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976715670] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:22:35.783249Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715670 2026-01-10T12:22:35.783285Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715670 2026-01-10T12:22:35.783311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976715670] at 72075186224037888 on unit CompleteWrite 2026-01-10T12:22:35.783350Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:35.783408Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:22:35.783441Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:22:35.783458Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:22:35.783525Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3502} 2026-01-10T12:22:35.783684Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:22:35.783719Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976715670] at 72075186224037890 on unit ExecuteWrite 2026-01-10T12:22:35.783748Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976715670 2026-01-10T12:22:35.783773Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976715670 2026-01-10T12:22:35.783793Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3502:281474976715670] at 72075186224037890 on unit CompleteWrite 2026-01-10T12:22:35.783821Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 
2026-01-10T12:22:35.783866Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-10T12:22:35.783888Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-10T12:22:35.784247Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2474:2538], Recipient [2:2346:2508]: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:22:35.784286Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:35.784324Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715670 2026-01-10T12:22:35.784393Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:22:35.784499Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976715670] from 72075186224037888 at tablet 72075186224037888 send result to client [1:2565:3612] 2026-01-10T12:22:35.784688Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2346:2508], Recipient [2:2474:2538]: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-10T12:22:35.784713Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:35.784744Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715670 2026-01-10T12:22:35.784789Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-10T12:22:35.784869Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976715670] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2567:3612] 2026-01-10T12:22:35.785318Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:35.785520Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:22:35.786052Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2346:2508], Recipient [1:2458:3586] 2026-01-10T12:22:35.786097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:35.786128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715670 2026-01-10T12:22:35.786183Z node 1 :TX_DATASHARD DEBUG: 
datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-10T12:22:35.786256Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:22:35.786517Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:65:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3502 UnfrozenTablets: 72075186224037890 2026-01-10T12:22:35.786687Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:65:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3502} 2026-01-10T12:22:35.787111Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2474:2538], Recipient [1:2458:3586] 2026-01-10T12:22:35.787164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:35.787195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715670 2026-01-10T12:22:35.787238Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-10T12:22:35.787331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3502 : 281474976715670] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2566:3612] 2026-01-10T12:22:35.787907Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:22:35.796291Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:22:35.796886Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2406:3545], Recipient [2:2346:2508] 2026-01-10T12:22:35.796939Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.797001Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715670 2026-01-10T12:22:35.797085Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2406:3545], Recipient [2:2474:2538] 2026-01-10T12:22:35.797111Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.797144Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715670 2026-01-10T12:22:35.797424Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-10T12:22:35.797499Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-10T12:22:35.797688Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[2:2474:2538], Recipient [2:2346:2508]: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2026-01-10T12:22:35.797734Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.797768Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715670 2026-01-10T12:22:35.797833Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2346:2508], Recipient [2:2474:2538]: {TEvReadSet step# 3502 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-10T12:22:35.797859Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.797892Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715670 2026-01-10T12:22:35.798491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2346:2508], Recipient [1:2406:3545] 2026-01-10T12:22:35.798531Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.798568Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715670 2026-01-10T12:22:35.798747Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2474:2538], Recipient [1:2406:3545] 2026-01-10T12:22:35.798781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:35.798815Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715670 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2026-01-10T12:22:18.557927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:18.558785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:18.558823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:18.567419Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:18.567680Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:18.568665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: 
TxInitSchema.Execute 2026-01-10T12:22:18.608643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:18.622142Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:18.627705Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:18.630946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:18.631058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:18.631111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:18.632204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:18.632379Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:18.632458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:18.721540Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:18.753379Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:18.753568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:18.753677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:18.753743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:18.753798Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:18.753831Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.754072Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.754138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.754468Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:18.754575Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:18.754636Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.754693Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:18.754939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:18.755002Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:18.755052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:18.755086Z node 1 :TX_DATASHARD 
INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:18.755130Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:18.755231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.755266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.755321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:18.763755Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:18.763833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:18.763930Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:18.764178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:18.764242Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:18.764306Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:18.764363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:18.764414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:18.764450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:18.764482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.764773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:18.764810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:18.764860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:18.764892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.764970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:18.765007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:18.765036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 
9437184 to execution unit WaitForPlan 2026-01-10T12:22:18.765064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.765096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:18.777270Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:18.777344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.777378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.777438Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:18.777823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:18.779948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.780004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.780052Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:18.780151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:18.780179Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:18.780308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.780352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:18.780389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:18.780418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:18.786744Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:18.786806Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.787031Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.787125Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.787187Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.787235Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:18.787272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:18.787315Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:18.787352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... ions 2026-01-10T12:22:37.250926Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:37.251173Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [6:347:2315], Recipient [6:347:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.251218Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.251268Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-10T12:22:37.251303Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:37.251334Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:22:37.251370Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-01-10T12:22:37.251402Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-01-10T12:22:37.251435Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.251464Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-01-10T12:22:37.251492Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-01-10T12:22:37.251571Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-01-10T12:22:37.252243Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-01-10T12:22:37.252294Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.252322Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-01-10T12:22:37.252351Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-01-10T12:22:37.252381Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-01-10T12:22:37.252421Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.252444Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-01-10T12:22:37.252470Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit 
BuildAndWaitDependencies 2026-01-10T12:22:37.252498Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-01-10T12:22:37.252549Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-01-10T12:22:37.252584Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-01-10T12:22:37.252617Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-01-10T12:22:37.252658Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.252684Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:37.252709Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-01-10T12:22:37.252735Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-01-10T12:22:37.252804Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.252832Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-01-10T12:22:37.252857Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-01-10T12:22:37.252884Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-01-10T12:22:37.252912Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.252935Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-01-10T12:22:37.252959Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-01-10T12:22:37.252984Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-01-10T12:22:37.253021Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.253073Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-01-10T12:22:37.253124Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-01-10T12:22:37.253151Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-01-10T12:22:37.253180Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.253204Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-01-10T12:22:37.253229Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 
2026-01-10T12:22:37.253254Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-01-10T12:22:37.253280Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.253300Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-01-10T12:22:37.253323Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-01-10T12:22:37.253349Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-01-10T12:22:37.253766Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-01-10T12:22:37.253844Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:37.253904Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.253933Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-01-10T12:22:37.253961Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-01-10T12:22:37.253991Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-01-10T12:22:37.254177Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-01-10T12:22:37.254206Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-01-10T12:22:37.254234Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-01-10T12:22:37.254264Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-01-10T12:22:37.254296Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:22:37.254320Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-01-10T12:22:37.254347Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-01-10T12:22:37.254377Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:37.254405Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:22:37.254434Z node 6 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-10T12:22:37.254468Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 
2026-01-10T12:22:37.268067Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-10T12:22:37.268173Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-10T12:22:37.268256Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:22:37.268308Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-10T12:22:37.268384Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:22:37.268446Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-10T12:22:37.269012Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-10T12:22:37.269060Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-10T12:22:37.269105Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:37.269133Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-10T12:22:37.269177Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:22:37.269213Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:29.232831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:29.315924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:29.324454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:29.324789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:29.324875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:29.605423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:29.605530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:29.611099Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047747012412 != 1768047747012415 2026-01-10T12:22:29.627250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:29.672189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:29.762411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:30.027870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:30.042169Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:30.141075Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:22:30.141134Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:22:30.141277Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:22:30.240550Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:22:30.240650Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:22:30.241374Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:22:30.241509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:22:30.241916Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:22:30.242084Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:22:30.242245Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:22:30.244355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:30.244818Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:22:30.245607Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:22:30.245685Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:22:30.276914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:30.277818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:30.278045Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-10T12:22:30.278265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:30.313496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:30.313809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:30.314684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:30.314844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:30.316294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:30.316382Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:30.316449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:30.316768Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 
2026-01-10T12:22:30.316843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:30.317084Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-10T12:22:30.317238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:30.322961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:30.323086Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:919:2768] in generation 1 2026-01-10T12:22:30.323290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:30.324386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:30.324492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:30.325654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:22:30.325700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:22:30.325740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:22:30.325953Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:30.326061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:30.326118Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-10T12:22:30.336955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:30.372376Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:30.372585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:30.372721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-10T12:22:30.372761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:30.372799Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:30.372834Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:30.373106Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.373162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.373271Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:30.373317Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: 
Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:22:30.373388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:30.373476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-10T12:22:30.373502Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:22:30.373525Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:22:30.373549Z node 1 :TX ... Restarts 2026-01-10T12:22:37.225866Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:22:37.225905Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:22:37.225943Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:37.226037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:37.226076Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:22:37.226112Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:37.226146Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:37.226195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:37.226220Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:37.226247Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T12:22:37.236926Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:22:37.236991Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:37.237036Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:22:37.237129Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:37.238935Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-10T12:22:37.238984Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 281474976715661 ProcessProposeTransaction 2026-01-10T12:22:37.239043Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:1058:2877] DataReq marker# P0 2026-01-10T12:22:37.239156Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:1058:2877] Cookie# 0 txid# 
281474976715661 HANDLE TDataReq marker# P1 2026-01-10T12:22:37.239381Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:1058:2877] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2026-01-10T12:22:37.239588Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:1058:2877] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-10T12:22:37.239678Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:1058:2877] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2026-01-10T12:22:37.239966Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1058 RawX2: 8589937469 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020&\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\"\004\000\000\000\000\000\000\021=\013\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2026-01-10T12:22:37.240015Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:37.240118Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:37.240265Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:38:0] 2026-01-10T12:22:37.240339Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2026-01-10T12:22:37.240385Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-10T12:22:37.240427Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2026-01-10T12:22:37.240463Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:37.240494Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:37.240531Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:22:37.240585Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715661] at 72075186224037888 2026-01-10T12:22:37.240617Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-10T12:22:37.240637Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:37.240652Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2026-01-10T12:22:37.240672Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2026-01-10T12:22:37.240689Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-10T12:22:37.240716Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2026-01-10T12:22:37.240754Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2026-01-10T12:22:37.240777Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-10T12:22:37.240831Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:1058:2877] for [0:281474976715661] at 72075186224037888 2026-01-10T12:22:37.240855Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-10T12:22:37.240896Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:22:37.240968Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2026-01-10T12:22:37.241034Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976715661 2026-01-10T12:22:37.241067Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2026-01-10T12:22:37.241160Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287942, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2026-01-10T12:22:37.241194Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2026-01-10T12:22:37.241269Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1058:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-10T12:22:37.241303Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-10T12:22:37.241371Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.241408Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.241461Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:37.241503Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:22:37.241545Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2026-01-10T12:22:37.241574Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-10T12:22:37.241618Z node 2 
:TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715661] at 72075186224037888 2026-01-10T12:22:37.241657Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-10T12:22:37.241694Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-10T12:22:37.241726Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2026-01-10T12:22:37.241758Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2026-01-10T12:22:37.241936Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-10T12:22:37.241979Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-10T12:22:37.242014Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-10T12:22:37.242058Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:37.242098Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:37.242160Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:37.242527Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1064:2882], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:22:37.242610Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] Test command err: 2026-01-10T12:22:16.487937Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707206584864758:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:16.488044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:16.700614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:16.700733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:16.792543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:16.841901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:17.106975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2026-01-10T12:22:17.107007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:17.107067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:17.107172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:17.301314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:17.495431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:18.977336Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:18.983462Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:22:18.983515Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:22:18.983540Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:18.984943Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:18.984974Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Creating table 2026-01-10T12:22:18.985025Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:18.985027Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:18.985038Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Creating table 2026-01-10T12:22:18.985076Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:22:18.985258Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. Describe result: PathErrorUnknown 2026-01-10T12:22:18.985285Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. Creating table 2026-01-10T12:22:18.985307Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-10T12:22:18.991833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:18.993870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:18.995805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:19.007182Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-10T12:22:19.007212Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-10T12:22:19.007243Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Subscribe on create table tx: 281474976710659 2026-01-10T12:22:19.007246Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. Subscribe on create table tx: 281474976710660 2026-01-10T12:22:19.007307Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-10T12:22:19.007319Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Subscribe on create table tx: 281474976710658 2026-01-10T12:22:19.010307Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Subscribe on tx: 281474976710658 registered 2026-01-10T12:22:19.010333Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Subscribe on tx: 281474976710659 registered 2026-01-10T12:22:19.010342Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. 
Subscribe on tx: 281474976710660 registered 2026-01-10T12:22:19.113100Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-10T12:22:19.137875Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-10T12:22:19.140738Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7593707215174800149:2489] Owner: [1:7593707215174800146:2486]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-10T12:22:19.207264Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-10T12:22:19.207366Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Column diff is empty, finishing 2026-01-10T12:22:19.208527Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-10T12:22:19.209727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.210824Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.210855Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7593707215174800148:2488] Owner: [1:7593707215174800146:2486]. Successful alter request: ExecComplete 2026-01-10T12:22:19.225175Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-10T12:22:19.225288Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. Column diff is empty, finishing 2026-01-10T12:22:19.225399Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-10T12:22:19.226501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:19.227495Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7593707215174800147:2487] Owner: [1:7593707215174800146:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710662 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-10T12:22:19.227517Z node 1 :KQP_PROXY I ... eCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710690 } 2026-01-10T12:22:36.924137Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7593707291624046720:2927] Owner: [3:7593707291624046719:2926]. Subscribe on create table tx: 281474976710690 2026-01-10T12:22:36.924367Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710685 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710690 } 2026-01-10T12:22:36.924392Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. Subscribe on create table tx: 281474976710690 2026-01-10T12:22:36.926230Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046738:2945] Owner: [3:7593707291624046737:2944]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926244Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926251Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046720:2927] Owner: [3:7593707291624046719:2926]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926257Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046732:2939] Owner: [3:7593707291624046731:2938]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926263Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046724:2931] Owner: [3:7593707291624046723:2930]. 
Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926269Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046726:2933] Owner: [3:7593707291624046725:2932]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926277Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046734:2941] Owner: [3:7593707291624046733:2940]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926283Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046736:2943] Owner: [3:7593707291624046735:2942]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926288Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046728:2935] Owner: [3:7593707291624046727:2934]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.926294Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7593707291624046722:2929] Owner: [3:7593707291624046721:2928]. Subscribe on tx: 281474976710690 registered 2026-01-10T12:22:36.951050Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046736:2943] Owner: [3:7593707291624046735:2942]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951145Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046720:2927] Owner: [3:7593707291624046719:2926]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951161Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046726:2933] Owner: [3:7593707291624046725:2932]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951176Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046732:2939] Owner: [3:7593707291624046731:2938]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951192Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046738:2945] Owner: [3:7593707291624046737:2944]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951210Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046728:2935] Owner: [3:7593707291624046727:2934]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951228Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046722:2929] Owner: [3:7593707291624046721:2928]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951244Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046734:2941] Owner: [3:7593707291624046733:2940]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951260Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7593707291624046724:2931] Owner: [3:7593707291624046723:2930]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:36.951277Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. 
SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. Request: create. Transaction completed: 281474976710690. Doublechecking... 2026-01-10T12:22:37.007840Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046732:2939] Owner: [3:7593707291624046731:2938]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.007885Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046732:2939] Owner: [3:7593707291624046731:2938]. Column diff is empty, finishing 2026-01-10T12:22:37.008244Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046736:2943] Owner: [3:7593707291624046735:2942]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.008277Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046736:2943] Owner: [3:7593707291624046735:2942]. Column diff is empty, finishing 2026-01-10T12:22:37.009188Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046724:2931] Owner: [3:7593707291624046723:2930]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.009217Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046724:2931] Owner: [3:7593707291624046723:2930]. Column diff is empty, finishing 2026-01-10T12:22:37.009234Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046738:2945] Owner: [3:7593707291624046737:2944]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.009254Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046738:2945] Owner: [3:7593707291624046737:2944]. Column diff is empty, finishing 2026-01-10T12:22:37.009280Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046728:2935] Owner: [3:7593707291624046727:2934]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.009295Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046728:2935] Owner: [3:7593707291624046727:2934]. Column diff is empty, finishing 2026-01-10T12:22:37.016492Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046720:2927] Owner: [3:7593707291624046719:2926]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.016530Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046720:2927] Owner: [3:7593707291624046719:2926]. Column diff is empty, finishing 2026-01-10T12:22:37.017393Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.017420Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046730:2937] Owner: [3:7593707291624046729:2936]. Column diff is empty, finishing 2026-01-10T12:22:37.017902Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046722:2929] Owner: [3:7593707291624046721:2928]. 
Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.017928Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046722:2929] Owner: [3:7593707291624046721:2928]. Column diff is empty, finishing 2026-01-10T12:22:37.028881Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046734:2941] Owner: [3:7593707291624046733:2940]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.028939Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046734:2941] Owner: [3:7593707291624046733:2940]. Column diff is empty, finishing 2026-01-10T12:22:37.046783Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7593707291624046726:2933] Owner: [3:7593707291624046725:2932]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:37.046833Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7593707291624046726:2933] Owner: [3:7593707291624046725:2932]. Column diff is empty, finishing 2026-01-10T12:22:37.061651Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxqhr52nvyje7ag9ky2a0b", Request has 18444976025952.489989s seconds to be completed 2026-01-10T12:22:37.063433Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxqhr52nvyje7ag9ky2a0b", Created new session, sessionId: ydb://session/3?node_id=3&id=Zjg2NDMzNDMtNTFjNzYxMTYtOWEwZGFjZDAtNGVhN2Y2NzQ=, workerId: [3:7593707295919014254:2513], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:22:37.063605Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxqhr52nvyje7ag9ky2a0b 2026-01-10T12:22:37.079671Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=Zjg2NDMzNDMtNTFjNzYxMTYtOWEwZGFjZDAtNGVhN2Y2NzQ=, workerId: [3:7593707295919014254:2513], local sessions count: 1 2026-01-10T12:22:37.082225Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MzFkOTU3YWItYmZiMmQzZjYtZTYxN2Q3YTgtNWY4NmE4YWU=, workerId: [3:7593707291624046706:2503], local sessions count: 0 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:29.460062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:29.550350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:29.556377Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:29.556738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:29.556809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:29.812676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:29.812773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:29.817313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047747199630 != 1768047747199633 2026-01-10T12:22:29.829735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:29.871590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:29.961613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:30.229842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:30.243967Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:30.344982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:30.374056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:30.374838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:30.375055Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:30.375248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:30.407019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:30.407570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:30.407645Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:30.408868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:30.408920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:30.408981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:30.409250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:30.409347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:30.409403Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:30.420124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:30.450358Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:30.450514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:30.450592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:30.450631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:30.450657Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:30.450686Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:30.450877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.450934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.451173Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:30.451235Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:30.451347Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:30.451384Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:30.451413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:30.451437Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:30.451459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:30.451482Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:30.451522Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:30.451604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.451636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.451684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:30.451748Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:30.451785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:30.451864Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:30.452021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:30.452052Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:30.452136Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:30.452178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:30.452231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:30.452276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:30.452299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:30.452555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:30.452584Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:30.452623Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:30.452647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:30.452689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:30.452709Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:30.452732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:30.452757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:30.452785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:30.454082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:30.454121Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:30.464771Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-01-10T12:22:38.230253Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:38.231900Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:38.232101Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1092:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1089:2903];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:38.232194Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1092:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:38.232520Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1084:2877] TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1089:2903] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 163190 Tasks { TaskId: 1 CpuTimeUs: 160491 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 12 BuildCpuTimeUs: 160479 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tjszgfszoq" NodeId: 2 CreateTimeMs: 1768047757613 CurrentWaitInputTimeUs: 125474 UpdateTimeMs: 1768047758230 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:38.232598Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1089:2903] 2026-01-10T12:22:38.232677Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1205} ActorId: [2:1084:2877] TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-10T12:22:38.232749Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1198} ActorId: [2:1084:2877] TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1090:2904] TaskId# 2 trace_id# 2026-01-10T12:22:38.232860Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1198} ActorId: [2:1084:2877] TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1091:2905] TaskId# 3 trace_id# 2026-01-10T12:22:38.232958Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1084:2877] TxId: 281474976715662. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:38.233246Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, ActorId: [2:1058:2877], ActorState: ExecuteState, LegacyTraceId: 01kekxqhvr7ezrv0g71mj6egc7, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-10T12:22:38.233552Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:22:38.233597Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-10T12:22:38.233652Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1090:2904], TxId: 281474976715662, task: 2. Ctx: { TraceId : 01kekxqhvr7ezrv0g71mj6egc7. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-01-10T12:22:38.233731Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1090:2904], TxId: 281474976715662, task: 2. Ctx: { TraceId : 01kekxqhvr7ezrv0g71mj6egc7. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1084:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-10T12:22:38.233870Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:38.234983Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:38.235093Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1091:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kekxqhvr7ezrv0g71mj6egc7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-01-10T12:22:38.235134Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1091:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kekxqhvr7ezrv0g71mj6egc7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1084:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-10T12:22:38.235216Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-10T12:22:38.252221Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-10T12:22:38.252674Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-10T12:22:38.252750Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-10T12:22:38.252794Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-10T12:22:38.252873Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1125:2937] SnapshotReq marker# P0 2026-01-10T12:22:38.253197Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1127:2937] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-10T12:22:38.253358Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1127:2937] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-10T12:22:38.253435Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1125:2937] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-10T12:22:38.253657Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1125:2937], Recipient [2:887:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-10T12:22:38.253807Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-10T12:22:38.253905Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:22:38.253958Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:1124:2877] TxId: 281474976715664. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:22:38.254050Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1124:2877] TxId: 281474976715664. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:38.254099Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1124:2877] TxId: 281474976715664. Ctx: { TraceId: 01kekxqhvr7ezrv0g71mj6egc7, Database: , SessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:22:38.254292Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1058:2877] 2026-01-10T12:22:38.254958Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=MjIwNDdhZGYtMzFmNGIzZjYtODMyOWViYTEtY2VmNjRmYmU=, workerId: [2:1058:2877], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 216 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2026-01-10T12:22:34.928392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:34.929433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:34.929473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:34.936983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:34.937321Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:34.937575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:34.971579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:34.979238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:34.979817Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:34.981279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:34.981336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:34.981386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:34.981666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:34.981727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:34.981799Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:35.050558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:35.081178Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:35.081341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:35.081450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:35.081498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:35.081530Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:35.081558Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.081772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.081820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.082048Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:35.082127Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:35.082168Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.082206Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:35.082247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:35.082275Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:35.082315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:35.082344Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:35.082376Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:35.082465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.082499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.082556Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:35.087718Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:35.087788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:35.087870Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:35.088017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:35.088071Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:35.088147Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:35.088192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:35.088221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:35.088248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:35.088274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.088531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:35.088569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:35.088598Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:35.088624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.088677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:35.088702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:35.088727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:35.088753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:35.088770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:35.100812Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:35.100915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.100955Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.101020Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:35.101096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not 
sending time cast registration request in state WaitScheme 2026-01-10T12:22:35.101706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.101780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.101835Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:35.101949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:35.102001Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:35.102124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:35.102165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:35.102204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:35.102240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:35.106109Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:35.106176Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.106386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.106437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.106494Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.106538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:35.106573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:35.106618Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:35.106658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:39.531420Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.531445Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:39.531515Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.531545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.531589Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.531647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-10T12:22:39.531695Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.531842Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.531879Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.531904Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.531931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.531967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.532018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:22:39.532045Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.532164Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.532188Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.532212Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.532238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.532302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.532351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-10T12:22:39.532409Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 
2026-01-10T12:22:39.532522Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.532546Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:39.532590Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.532622Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.532656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.532697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:22:39.532721Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.532832Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.532856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.532920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.532965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:22:39.533007Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.533107Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.533131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.533163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.533202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:39.533225Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.533321Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:39.533346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:39.533377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-10T12:22:39.533427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 
SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:39.533464Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:39.533702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-10T12:22:39.533738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.533768Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-10T12:22:39.533880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-10T12:22:39.533911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.533936Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-10T12:22:39.534014Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:39.534041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534079Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-10T12:22:39.534170Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-10T12:22:39.534198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534255Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-10T12:22:39.534340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-10T12:22:39.534365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534389Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-10T12:22:39.534440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-10T12:22:39.534466Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534490Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-10T12:22:39.534575Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:39.534601Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534625Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:22:39.534702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:39.534746Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:39.534784Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:28.362816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:28.440924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:28.446741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:28.447006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:28.447077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:28.704778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:28.704886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:28.709573Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047746090155 != 1768047746090158 2026-01-10T12:22:28.719094Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:28.764185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:28.853432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:29.135193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:29.148631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:29.250394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:29.280344Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:29.281327Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:29.281608Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:29.281851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:29.316265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:29.317057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:29.317169Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:29.318619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:29.318685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:29.318738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:29.319130Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:29.319274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:29.319376Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:29.330069Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:29.371201Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:29.371409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:29.371519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:29.371567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:29.371634Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:29.371685Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:29.371938Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.372049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.372423Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:29.372522Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:29.372719Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:29.372782Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:29.372888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:29.372929Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:29.372982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:29.373020Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:29.373092Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:29.373196Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.373233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.373288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:29.373404Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:29.373473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:29.373591Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:29.373845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:29.373913Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:29.374031Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:29.374106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:29.374157Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:29.374213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:29.374252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:29.374563Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:29.374603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:29.374661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:29.374701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:29.374766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:29.374801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:29.374835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:29.374886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:29.374919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:29.376791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:29.376846Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:29.387614Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... reamingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-10T12:22:39.274977Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-10T12:22:39.275029Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:22:39.275241Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1411:3134] 2026-01-10T12:22:39.275293Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1411:3134], channels: 1 2026-01-10T12:22:39.275370Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:22:39.275420Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1411:3134], trace_id# 2026-01-10T12:22:39.275471Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1407:3134] TxId: 281474976715671. 
Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1411:3134], trace_id# 2026-01-10T12:22:39.275515Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:22:39.276003Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1411:3134] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:22:39.276046Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1411:3134], trace_id# 2026-01-10T12:22:39.276113Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1411:3134], trace_id# 2026-01-10T12:22:39.276433Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1412:3134], Recipient [2:1349:3101]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-10T12:22:39.276520Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:39.276570Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v5001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v5001/18446744073709551615 2026-01-10T12:22:39.276614Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v6000/18446744073709551615 2026-01-10T12:22:39.276662Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:39.276723Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:39.276768Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:39.276796Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:39.276836Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:39.276864Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-10T12:22:39.276900Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:39.276917Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:39.276932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:39.276948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:39.277030Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:22:39.277245Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1412:3134], 0} after executionsCount# 1 2026-01-10T12:22:39.277289Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1412:3134], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:39.277353Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1412:3134], 0} finished in read 2026-01-10T12:22:39.277397Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:39.277417Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:39.277434Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:39.277453Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:39.277479Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-10T12:22:39.277495Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:39.277514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-10T12:22:39.277539Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:39.278172Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1412:3134], Recipient [2:1349:3101]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:39.278212Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:22:39.278343Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1411:3134] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 484 Tasks { TaskId: 1 CpuTimeUs: 143 FinishTimeMs: 1768047759277 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 58 BuildCpuTimeUs: 85 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047759277 CreateTimeMs: 1768047759275 UpdateTimeMs: 1768047759277 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:39.278398Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1411:3134] 2026-01-10T12:22:39.278493Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1407:3134] TxId: 281474976715671. Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:39.278540Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1407:3134] TxId: 281474976715671. 
Ctx: { TraceId: 01kekxqktxdh0h86kqd3ygz2jy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRiMGIwNTktOGEyNGFhNTctMjBkY2Y0Y2ItZmY3NDk1YzA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000484s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 7 } items { uint32_value: 4 } } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2026-01-10T12:22:34.504984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:34.505953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:34.506003Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:34.516882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:34.517252Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:34.517486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:34.551781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:34.560829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:34.561547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:34.563633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:34.563712Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:34.563774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:34.564177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:34.564275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:34.564368Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:34.654900Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:34.684451Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:34.684617Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:34.684695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:34.684748Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:34.684779Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 
2026-01-10T12:22:34.684812Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:34.685060Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:34.685108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:34.685343Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:34.685464Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:34.685515Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:34.685558Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:34.685595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:34.685628Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:34.685670Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:34.685700Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:34.685745Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:34.685820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:34.685846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:34.685904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:34.691776Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:34.691860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:34.691942Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:34.692114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:34.692166Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:34.692216Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:34.692259Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:34.692290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:34.692320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:34.692346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:34.692602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:34.692662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:34.692702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:34.692736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:34.692810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:34.692864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:34.692898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:34.692949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:34.692975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:34.705040Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:34.705141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:34.705183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:34.705225Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:34.705281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:34.705752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:34.705812Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:34.705857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:34.705914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:34.705940Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 
2026-01-10T12:22:34.706049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:34.706079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:34.706108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:34.706151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:34.708751Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:34.708813Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:34.709006Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:34.709037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:34.709079Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:34.709117Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:34.709143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:34.709177Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:34.709207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompleteOperation 2026-01-10T12:22:40.094374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.094518Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is DelayComplete 2026-01-10T12:22:40.094549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2026-01-10T12:22:40.094580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2026-01-10T12:22:40.094610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2026-01-10T12:22:40.094641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is Executed 2026-01-10T12:22:40.094663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2026-01-10T12:22:40.094707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:154] at 9437184 has finished 2026-01-10T12:22:40.094733Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:40.094755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:40.094782Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:40.094809Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:40.114414Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:40.114480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.114541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:40.114615Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:40.114656Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.114827Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:40.114870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.114907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:40.114956Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.115074Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437184 2026-01-10T12:22:40.115101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.115145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:40.115194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:40.115220Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.115330Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:40.115353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.115382Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:22:40.115432Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:40.115461Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.115563Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:40.115587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.115616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:40.115641Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.115752Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:40.115777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-10T12:22:40.115833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-10T12:22:40.115874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:40.115902Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.116119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:40.116166Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.116206Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-10T12:22:40.116304Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:40.116330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-10T12:22:40.116382Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-10T12:22:40.116440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:40.116466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:40.116574Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:40.116608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:40.116650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-10T12:22:40.116688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:40.116725Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:40.117013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:40.117071Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.117103Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:22:40.117165Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:40.117187Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.117226Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-10T12:22:40.117331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:458:2400], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:40.117357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.117380Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-10T12:22:40.117434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:40.117459Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.117481Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:40.117563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:40.117603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.117627Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_46_Query >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2026-01-10T12:22:16.487916Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707204860626197:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:16.487998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:16.749570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:16.749708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:16.805589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:22:16.805829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:16.807623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:17.011761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:22:17.011837Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:148: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 
2026-01-10T12:22:17.023954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:17.494751Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:18.952064Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:18.960347Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-10T12:22:18.962989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707213450561592:2302], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.963099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.963472Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-10T12:22:18.963505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707213450561601:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.963563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.963594Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:22:18.963630Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:22:18.963647Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:18.963863Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7593707209155594241:2479], selfId: [1:7593707204860626421:2266], source: [1:7593707204860626421:2266] 2026-01-10T12:22:18.964370Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-10T12:22:18.964441Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-10T12:22:18.964515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707213450561603:2304], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.964517Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7593707209155594241:2479], selfId: [1:7593707204860626421:2266], source: [1:7593707204860626421:2266] 2026-01-10T12:22:18.964576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.964751Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2026-01-10T12:22:18.964786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707213450561605:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.964789Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq } 2026-01-10T12:22:18.964827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:18.964860Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 4, sender: [1:7593707209155594241:2479], selfId: [1:7593707204860626421:2266], source: [1:7593707204860626421:2266] ... waiting for SysViewsRoster update finished 2026-01-10T12:22:21.825941Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:21.834874Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:21.835097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:21.835138Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:22.036394Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:22.036492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:22.055227Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047739351582 != 1768047739351586 2026-01-10T12:22:22.058457Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:22.100191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:22.181501Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:22.451057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:22.465347Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:22.466133Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:305:2349], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:22:22.466484Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:305:2349], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:22:22.468066Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:305:2349], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-10T12:22:22.468232Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:305:2349], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:840:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:22:22.468357Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:305:2349], cacheItem# { Subscriber: ... ionId: ydb://session/3?node_id=5&id=YTJmYTY0MWMtOGM4YWJhZmYtN2M5ZTNjZWUtNjZmOGY1MGI=, PoolId: , DatabaseId: , CustomerSuppliedId: 01kekxqk0t8ywd4dakj6814xd8, CurrentExecutionId: a65990bb-776acdf7-cb4257c1-e77e074, RunScriptActorId: [5:7593707299212585905:3340], IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [5:7593707299212585983:2364] 2026-01-10T12:22:38.865068Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [5:7593707299212585986:3385] 2026-01-10T12:22:38.865270Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976025950.686356s seconds to be completed 2026-01-10T12:22:38.866561Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=ZjdhOTQ5MjAtZWU4YzVmYTAtZjJhMTdiYzctZThhNGU4MTI=, workerId: [5:7593707299212585989:2367], database: /Root, longSession: 1, local sessions count: 2 2026-01-10T12:22:38.866685Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:22:38.866735Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593707299212585987:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.866830Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.866982Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7593707299212585905:3340], ActorId: [5:7593707299212585984:3384], TraceId: ExecutionId: a65990bb-776acdf7-cb4257c1-e77e074, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=ZjdhOTQ5MjAtZWU4YzVmYTAtZjJhMTdiYzctZThhNGU4MTI=, TxId: , text: -- TScriptProgressActor::OnRunQuery DECLARE $execution_id AS Text; DECLARE $database AS Text; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $execution_status AS Int32; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, execution_status = $execution_status WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-01-10T12:22:38.867113Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593707299212585993:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.867134Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593707299212585994:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.867227Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.867423Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZjdhOTQ5MjAtZWU4YzVmYTAtZjJhMTdiYzctZThhNGU4MTI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [5:7593707299212585989:2367] 2026-01-10T12:22:38.867456Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [5:7593707299212586000:3391] 2026-01-10T12:22:38.870002Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715666:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:38.883759Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kekxqkh3bmyx9hmfxpeegqfd", Request has 18444976025950.667885s seconds to be completed 2026-01-10T12:22:38.886229Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekxqkh3bmyx9hmfxpeegqfd", Created new session, sessionId: ydb://session/3?node_id=5&id=YTdkMGZiY2ItNzAxNTRlYjYtNWUxN2M2ZTMtNzAyYTVhOGI=, workerId: [5:7593707299212586061:2377], database: /Root, longSession: 1, local sessions count: 3 2026-01-10T12:22:38.886471Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kekxqkh3bmyx9hmfxpeegqfd 2026-01-10T12:22:38.890812Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593707299212585998:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715666 completed, doublechecking } 2026-01-10T12:22:38.895091Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekxqkhe4hxwhkkcfyn2we9d, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YTdkMGZiY2ItNzAxNTRlYjYtNWUxN2M2ZTMtNzAyYTVhOGI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 11, targetId: [5:7593707299212586061:2377] 2026-01-10T12:22:38.895138Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [5:7593707299212586075:3446] 2026-01-10T12:22:38.913111Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7593707299212586092:3451], for# user@builtin, access# DescribeSchema 2026-01-10T12:22:38.913140Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7593707299212586092:3451], for# user@builtin, access# DescribeSchema 2026-01-10T12:22:38.915167Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593707299212586089:2383], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:22:38.917763Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YTdkMGZiY2ItNzAxNTRlYjYtNWUxN2M2ZTMtNzAyYTVhOGI=, ActorId: [5:7593707299212586061:2377], ActorState: ExecuteState, LegacyTraceId: 01kekxqkhe4hxwhkkcfyn2we9d, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:22:38.918164Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxqkhe4hxwhkkcfyn2we9d", Forwarded response to sender actor, requestId: 11, sender: [5:7593707299212586074:2378], selfId: [5:7593707286327682372:2262], source: [5:7593707299212586061:2377] 2026-01-10T12:22:38.966459Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593707299212586098:3455] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:39.052310Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxqk0t8ywd4dakj6814xd8", Forwarded response to sender actor, requestId: 7, sender: [5:7593707299212585905:3340], selfId: [5:7593707286327682372:2262], source: [5:7593707299212585983:2364] 2026-01-10T12:22:39.052975Z node 5 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7593707299212585905:3340], ActorId: [5:7593707303507553413:3468], TraceId: ExecutionId: a65990bb-776acdf7-cb4257c1-e77e074, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7593707303507553414:3469] 2026-01-10T12:22:39.053026Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7593707303507553413:3468], ActorId: [5:7593707303507553414:3469], TraceId: ExecutionId: a65990bb-776acdf7-cb4257c1-e77e074, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. 
Database: /Root, IsSystemUser: 1, run create session 2026-01-10T12:22:39.053162Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976025950.498471s seconds to be completed 2026-01-10T12:22:39.055611Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=ZTMwZmMxYWQtZGQ5ZGFmZTUtNmU0ZWUzNmEtMzAzZTliZmU=, workerId: [5:7593707303507553416:2390], database: /Root, longSession: 1, local sessions count: 4 2026-01-10T12:22:39.055827Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:22:39.056157Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7593707303507553413:3468], ActorId: [5:7593707303507553414:3469], TraceId: ExecutionId: a65990bb-776acdf7-cb4257c1-e77e074, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=ZTMwZmMxYWQtZGQ5ZGFmZTUtNmU0ZWUzNmEtMzAzZTliZmU=, TxId: , text: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-01-10T12:22:39.056521Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTMwZmMxYWQtZGQ5ZGFmZTUtNmU0ZWUzNmEtMzAzZTliZmU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 13, targetId: [5:7593707303507553416:2390] 2026-01-10T12:22:39.056585Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [5:7593707303507553418:3470] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TDataShardTrace::TestTraceDistributedUpsert-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2026-01-10T12:22:34.923771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:34.924540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:34.924574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:34.932899Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:34.933185Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:34.933383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:34.973235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:34.982596Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:34.983266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:34.985051Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:34.985127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:34.985175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:34.985571Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:34.985663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:34.985761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:35.065483Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:35.095451Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:35.095624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:35.095714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:35.095771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:35.095805Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:35.095841Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.096054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.096130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.096422Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:35.096530Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:35.096583Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.096630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:35.096661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:35.096693Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:35.096738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:35.096774Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:35.096809Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:35.096930Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.096963Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.097039Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:35.103289Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:35.103361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:35.103469Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:35.103664Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:35.103722Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:35.103780Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:35.103831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:35.103867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:35.103901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:35.103931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.104240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:35.104291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:35.104330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:35.104361Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.104424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:35.104464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:35.104494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:35.104523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on 
unit WaitForPlan 2026-01-10T12:22:35.104544Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:35.116438Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:35.116507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.116573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.116646Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:35.116706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:35.117232Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.117295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.117357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:35.117431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:35.117464Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:35.117596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:35.117640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:35.117674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:35.117708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:35.121245Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:35.121302Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.121489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.121525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.121572Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.121611Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:35.121641Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:35.121683Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:35.121714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:22:40.773724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:40.773753Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:40.773948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-10T12:22:40.773977Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.774004Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-10T12:22:40.774073Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:40.774093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-10T12:22:40.774130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-10T12:22:40.774165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:40.774210Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:40.774300Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:40.774468Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-10T12:22:40.774493Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.774514Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-10T12:22:40.774587Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-10T12:22:40.774606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.774623Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 
9437184 txId 151 2026-01-10T12:22:40.774664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:238:2230], Recipient [1:457:2399]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-10T12:22:40.774682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:22:40.774727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-10T12:22:40.774776Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-10T12:22:40.774829Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-10T12:22:40.774876Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-10T12:22:40.774981Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-10T12:22:40.775010Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.775030Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-10T12:22:40.775075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:40.775152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:40.775184Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-10T12:22:40.775222Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:22:40.775256Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-10T12:22:40.775278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-10T12:22:40.775323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:40.775348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-10T12:22:40.775370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-10T12:22:40.775396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-10T12:22:40.775419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:40.775441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance 
execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-10T12:22:40.775472Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-10T12:22:40.775490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-10T12:22:40.775885Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-10T12:22:40.775922Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:22:40.775973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-10T12:22:40.775996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-10T12:22:40.776017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-10T12:22:40.776062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:40.776266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-10T12:22:40.776290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-10T12:22:40.776312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-10T12:22:40.776339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-10T12:22:40.776364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-10T12:22:40.776381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-10T12:22:40.776398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-10T12:22:40.776418Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:40.776439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:22:40.776463Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-10T12:22:40.776499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-10T12:22:40.776672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-10T12:22:40.776705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: 
StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.776743Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-10T12:22:40.790151Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:22:40.790211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-10T12:22:40.790271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-10T12:22:40.790336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:40.790390Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:22:40.790702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-10T12:22:40.790743Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:22:40.790778Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> DataShardOutOfOrder::LocksBrokenStats [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:34.806114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:34.912140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:34.917907Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:34.918185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:34.918249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:35.196664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:35.196773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:35.201268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047752041836 != 1768047752041839 2026-01-10T12:22:35.212414Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:35.254784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:35.338312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:35.622067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:35.636140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:35.738028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:35.776728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:35.777740Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:35.778054Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:35.778289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:35.819270Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:35.819848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:35.819932Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:35.821271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:35.821329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:35.821363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:35.821652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:35.821767Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:35.821830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:35.832556Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:35.867034Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:35.867206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:35.867291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:35.867325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:35.867358Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:35.867389Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:35.867595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.867684Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.867981Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:35.868096Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:35.868251Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:35.868303Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:35.868340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:35.868372Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:35.868404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:35.868437Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:35.868485Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:35.868566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.868608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.868660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:35.868751Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:35.868801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:35.868914Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:35.869155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:35.869204Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:35.869299Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:35.869346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:35.869398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:35.869446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:35.869477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:35.869756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:35.869796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:35.869849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:35.869881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:35.869935Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:35.869965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:35.869996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:35.870026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:35.870062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:35.871802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:35.871854Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:35.882535Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 902Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-10T12:22:42.177934Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2026-01-10T12:22:42.177965Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:42.177993Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:42.178034Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-10T12:22:42.178082Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715664] at 72075186224037888 2026-01-10T12:22:42.178117Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-10T12:22:42.178143Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:42.178168Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:22:42.178193Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:22:42.178219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-10T12:22:42.178245Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:22:42.178267Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-10T12:22:42.178289Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-10T12:22:42.178352Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-10T12:22:42.178443Z node 2 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 
DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2026-01-10T12:22:42.178511Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-10T12:22:42.178572Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-10T12:22:42.178602Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-10T12:22:42.178627Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:42.178652Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:42.178694Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:22:42.178763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2026-01-10T12:22:42.178798Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:42.178837Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:42.178878Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:42.178924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-10T12:22:42.178951Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:42.178977Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2026-01-10T12:22:42.179038Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:22:42.179085Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:42.179135Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:42.180471Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2026-01-10T12:22:42.180979Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1118:2929], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.181023Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.181080Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# 
[2:1117:2928], serverId# [2:1118:2929], sessionId# [0:0:0] 2026-01-10T12:22:42.181186Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [2:1116:2927], Recipient [2:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-10T12:22:42.182815Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1121:2932], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.182870Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.182922Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1120:2931], serverId# [2:1121:2932], sessionId# [0:0:0] 2026-01-10T12:22:42.183120Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1119:2930], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-10T12:22:42.183243Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:42.183299Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-10T12:22:42.183349Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-10T12:22:42.183411Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:42.183502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:42.183542Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:42.183586Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:42.183623Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:42.183678Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:22:42.183717Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:42.183748Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:42.183770Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:42.183796Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:42.183888Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 
Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-10T12:22:42.184166Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1119:2930], 1000} after executionsCount# 1 2026-01-10T12:22:42.184235Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1119:2930], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:42.184312Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1119:2930], 1000} finished in read 2026-01-10T12:22:42.184381Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:42.184413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:42.184440Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:42.184467Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:42.184510Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:42.184537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:42.184566Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:22:42.184605Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:42.184707Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> DataStreams::TestReservedResourcesMetering >> DataStreams::TestDeleteStream >> DataStreams::TestPutRecordsOfAnauthorizedUser >> DataStreams::TestGetRecordsStreamWithSingleShard >> DataStreams::TestStreamStorageRetention >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataStreams::TestUpdateStream >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:22:36.464445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:36.551468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:36.559517Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:36.559941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:36.560045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:36.859071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:36.859194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:36.864089Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047754320259 != 1768047754320262 2026-01-10T12:22:36.878251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:36.921038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:37.012902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:37.294242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:37.308668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:37.410465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:37.442195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:37.442973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:37.443170Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-10T12:22:37.443395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:37.478150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:37.478482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:878:2760], Recipient [1:894:2770]: 
NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:37.479289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:37.479482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:37.480776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:37.480852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:37.480896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:37.481203Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:37.481277Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:37.481449Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-10T12:22:37.481663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:37.487232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:37.487336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:919:2768] in generation 1 2026-01-10T12:22:37.487465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:878:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:37.488438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:37.488538Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:37.489467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:22:37.489508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:22:37.489540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:22:37.489737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:37.489842Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:37.489882Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-10T12:22:37.500627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:37.527932Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:37.528112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:37.528194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-10T12:22:37.528237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:37.528264Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-10T12:22:37.528291Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:37.528496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.528527Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.528597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:37.528643Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:22:37.528686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:37.528724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-10T12:22:37.528742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:22:37.528763Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:22:37.528796Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:22:37.529082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.529111Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:37.529229Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:37.529316Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:37.529412Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:37.529458Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:37.529498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:37.529536Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:37.529561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:37.529584Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:37.529616Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:37.529660Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-10T12:22:37.529722Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-10T12:22:37.529832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:895:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:37.529872Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:37.529918Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:895:2771], sessionId# [0:0:0] 2026-01-10T12:22:37.529951Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-10T12:22:37.529978Z node 1 :TX_DAT ... tashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:22:38.435750Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=1 2026-01-10T12:22:38.435781Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:22:38.435817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:38.435840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:22:38.435870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:22:38.435896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:38.435949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:38.435967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:22:38.436036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:38.436058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:38.436101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:22:38.436137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:38.436168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T12:22:38.446811Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:22:38.446869Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:22:38.446902Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 
2026-01-10T12:22:38.446965Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:38.447395Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [1:1163:2945] TxId: 281474976715662. Ctx: { TraceId: 01kekxqk1j62qnc79z3d8jht3g, Database: , SessionId: ydb://session/3?node_id=1&id=YjZkOGM5YzMtNzkxMWJmNC0xZTVmNWM5MS04MDNkZGQ5Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:38.447439Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [1:1163:2945] TxId: 281474976715662. Ctx: { TraceId: 01kekxqk1j62qnc79z3d8jht3g, Database: , SessionId: ydb://session/3?node_id=1&id=YjZkOGM5YzMtNzkxMWJmNC0xZTVmNWM5MS04MDNkZGQ5Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ... waiting for SysViewsRoster update finished 2026-01-10T12:22:41.946281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:41.953923Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:41.954148Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:41.954197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:42.158958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:42.159097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:42.179434Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047758936432 != 1768047758936436 2026-01-10T12:22:42.182966Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:42.224971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:42.294610Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:42.586445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:42.601180Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:42.703731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:42.972119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:43.180693Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1026:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.180801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1037:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.180873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.181841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1042:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.181940Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.186240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:43.329799Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:43.365464Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1098:2914] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for readsets 2026-01-10T12:22:44.170688Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1180:2953], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1158:2953]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-10T12:22:44.181454Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:1172:2953], SessionActorId: [2:1158:2953], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:1158:2953]. 2026-01-10T12:22:44.181728Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976715664, task: 1, CA Id [2:1206:2991]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-10T12:22:44.181971Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=OGM5MjVjNGMtYmQzOWYzM2ItMWM3M2E2NjUtNzUyYWMyM2E=, ActorId: [2:1158:2953], ActorState: ExecuteState, LegacyTraceId: 01kekxqr2r3716ka4s5cwejk29, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:1173:2953] from: [2:1172:2953] trace_id# 2026-01-10T12:22:44.182684Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:1173:2953] TxId: 281474976715663. Ctx: { TraceId: 01kekxqr2r3716ka4s5cwejk29, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGM5MjVjNGMtYmQzOWYzM2ItMWM3M2E2NjUtNzUyYWMyM2E=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-10T12:22:44.183115Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=OGM5MjVjNGMtYmQzOWYzM2ItMWM3M2E2NjUtNzUyYWMyM2E=, ActorId: [2:1158:2953], ActorState: ExecuteState, LegacyTraceId: 01kekxqr2r3716ka4s5cwejk29, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataStreams::TestUpdateStorage >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:36.797413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:36.915320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:36.923442Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:36.923893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:36.923995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:37.254786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:37.254909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:37.260320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047754687128 != 1768047754687131 2026-01-10T12:22:37.275332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:37.321620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:37.404821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:37.694793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:37.708910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:37.813134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:38.127839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:38.335027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1114:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.335103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1124:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.335150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.335910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.336042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.339330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:38.491809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1128:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:38.555009Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1186:2962] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets ... sending immediate upsert ... waiting for immediate propose ... immediate upsert is blocked 2026-01-10T12:22:39.415133Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2026-01-10T12:22:39.422974Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:1370:3015], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1261:3015]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1370:3015]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2026-01-10T12:22:39.423440Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1364:3015], SessionActorId: [1:1261:3015], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1261:3015]. 2026-01-10T12:22:39.424066Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NzA1NWU5YmMtN2UzMjI2YzktNmI1Nzk2MDMtNzI1YzUyY2Y=, ActorId: [1:1261:3015], ActorState: ExecuteState, LegacyTraceId: 01kekxqkzb4758b3ysjf5neegr, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1365:3015] from: [1:1364:3015] trace_id# 2026-01-10T12:22:39.426183Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:1365:3015] TxId: 281474976715665. Ctx: { TraceId: 01kekxqkzb4758b3ysjf5neegr, Database: , SessionId: ydb://session/3?node_id=1&id=NzA1NWU5YmMtN2UzMjI2YzktNmI1Nzk2MDMtNzI1YzUyY2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } trace_id# 2026-01-10T12:22:39.426822Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzA1NWU5YmMtN2UzMjI2YzktNmI1Nzk2MDMtNzI1YzUyY2Y=, ActorId: [1:1261:3015], ActorState: ExecuteState, LegacyTraceId: 01kekxqkzb4758b3ysjf5neegr, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`." issue_code: 2006 severity: 1 issues { message: "Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting" issue_code: 2006 severity: 1 } } trace_id# 2026-01-10T12:22:39.427147Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [1:1337:3017], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1263:3017]TEvDeliveryProblem was received from tablet: 72075186224037889 2026-01-10T12:22:39.427249Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1329:3017], SessionActorId: [1:1263:3017], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1263:3017]. 2026-01-10T12:22:39.427847Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MTI1Yjc4MWYtNTRiY2Q1OTctMThjNjVkNjctZmQ3MmIyMDE=, ActorId: [1:1263:3017], ActorState: ExecuteState, LegacyTraceId: 01kekxqkwb1bwrh4xt68fxtbv7, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1330:3017] from: [1:1329:3017] trace_id# 2026-01-10T12:22:39.427954Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:1330:3017] TxId: 281474976715664. Ctx: { TraceId: 01kekxqkwb1bwrh4xt68fxtbv7, Database: , SessionId: ydb://session/3?node_id=1&id=MTI1Yjc4MWYtNTRiY2Q1OTctMThjNjVkNjctZmQ3MmIyMDE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } trace_id# 2026-01-10T12:22:39.428776Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTI1Yjc4MWYtNTRiY2Q1OTctMThjNjVkNjctZmQ3MmIyMDE=, ActorId: [1:1263:3017], ActorState: ExecuteState, LegacyTraceId: 01kekxqkwb1bwrh4xt68fxtbv7, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } } ... waiting for SysViewsRoster update finished 2026-01-10T12:22:42.469418Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:42.475520Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:42.475738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:42.475778Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:42.660428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:42.660545Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:42.689247Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047760180901 != 1768047760180905 2026-01-10T12:22:42.694373Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:42.767007Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:42.847130Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:43.113426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:43.126721Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:43.228649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:43.449087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-10T12:22:43.654096Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.654162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.654235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.654800Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.654853Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:43.657575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:43.796628Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:43.831356Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet 2026-01-10T12:22:44.485606Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1212:2953], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1158:2953]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:39.998273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:39.998611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:40.127315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:40.128509Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:40.139586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:40.140061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:40.140357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:40.142489Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:40.142879Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:40.143068Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:40.586126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:40.705303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:40.705445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:40.706288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:40.706366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:40.751426Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:22:40.752376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:40.752969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:40.832391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:40.857850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:41.466077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:41.533816Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:41.534010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:41.859533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:41.921996Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:41.926510Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:41.926828Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1615:2397] 2026-01-10T12:22:41.927077Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:41.986659Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1589:2384], Recipient [2:1615:2397]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:41.991854Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:41.992037Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:41.993693Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:41.993772Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:41.993843Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:41.994199Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:41.994465Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:41.994541Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1639:2397] in generation 1 2026-01-10T12:22:41.997369Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:42.031358Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:42.031541Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:42.031638Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1643:2414] 2026-01-10T12:22:42.031676Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:42.031711Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:42.031752Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:42.032041Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1615:2397], Recipient [2:1615:2397]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:42.032126Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:42.032468Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:42.032561Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:42.032732Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:42.032785Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:42.032827Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:42.032864Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:42.032896Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:42.032927Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:42.032974Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:22:42.033083Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1612:2395], Recipient [2:1615:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.033127Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.033167Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1601:3051], serverId# [2:1612:2395], sessionId# [0:0:0] 2026-01-10T12:22:42.033947Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:822:2464], Recipient [2:1612:2395] 2026-01-10T12:22:42.033994Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:42.034090Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:42.034315Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:42.034363Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:42.034468Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:42.034522Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:42.034573Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:42.034616Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:42.034649Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:42.034908Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution ... iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:44.673747Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:44.674620Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2000:2454], Recipient [2:1615:2397]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:44.674681Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-10T12:22:44.867733Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2016:2455], Recipient [2:1615:2397]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-10T12:22:44.868017Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:44.868131Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:44.868222Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:44.868260Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:44.868298Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:44.868338Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:44.868385Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:22:44.868432Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:44.868458Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2026-01-10T12:22:44.868487Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:44.868511Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:44.868668Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:44.868938Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:22:44.868995Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-10T12:22:44.869045Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2016:2455], 0} after executionsCount# 1 2026-01-10T12:22:44.869093Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2016:2455], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:44.869178Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2016:2455], 0} finished in read 2026-01-10T12:22:44.869241Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:44.869269Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:44.869298Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:44.869345Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:44.869397Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:22:44.869420Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:44.869451Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:22:44.869505Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:44.869611Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:44.870467Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2016:2455], Recipient [2:1615:2397]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:44.870530Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } 
items { uint32_value: 1 } } 2026-01-10T12:22:45.037867Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2038:2456], Recipient [2:1615:2397]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-10T12:22:45.038107Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:45.038186Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:45.038283Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-10T12:22:45.038326Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:45.038364Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:45.038402Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:45.038453Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2026-01-10T12:22:45.038492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-10T12:22:45.038527Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:45.038551Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:45.038575Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:45.038706Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-10T12:22:45.039011Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:22:45.039067Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-10T12:22:45.039119Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2038:2456], 0} after executionsCount# 1 2026-01-10T12:22:45.039168Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2038:2456], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 
2026-01-10T12:22:45.039250Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2038:2456], 0} finished in read 2026-01-10T12:22:45.039335Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-10T12:22:45.039364Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:45.039390Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:45.039422Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:45.039470Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-10T12:22:45.039502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:45.039534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 72075186224037888 has finished 2026-01-10T12:22:45.039572Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:45.039670Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:22:45.040792Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2038:2456], Recipient [2:1615:2397]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:45.040855Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:22:45.042015Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:260:2140], Recipient [2:1615:2397]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2026-01-10T12:22:17.493454Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707209599228485:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:17.493521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:17.709513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:17.709640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:17.751943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:22:17.808534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:22:17.810225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:17.811566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707209599228455:2081] 1768047737491750 != 1768047737491753 2026-01-10T12:22:17.977527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:22:18.002834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:18.500655Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:19.887868Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.893772Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=N2Y2YmVmNTMtYjQ3NjcxZTctM2M2MmE2M2UtNGI2OGIxNTM=, workerId: [1:7593707218189163882:2302], database: , longSession: 0, local sessions count: 1 2026-01-10T12:22:19.894157Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=N2Y2YmVmNTMtYjQ3NjcxZTctM2M2MmE2M2UtNGI2OGIxNTM=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7593707218189163882:2302] 2026-01-10T12:22:19.894206Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2026-01-10T12:22:19.894261Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:22:19.894300Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:22:19.894322Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:22:19.895395Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2718} SessionId: ydb://session/3?node_id=1&id=N2Y2YmVmNTMtYjQ3NjcxZTctM2M2MmE2M2UtNGI2OGIxNTM=, ActorId: [1:7593707218189163882:2302], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxy_request_id# 2 trace_id# 2026-01-10T12:22:19.895598Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7593707213894196540:2483], selfId: [1:7593707209599228717:2266], source: [1:7593707218189163882:2302] 2026-01-10T12:22:19.895819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707218189163890:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:19.895935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:19.896263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707218189163900:2304], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:19.896343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:19.904095Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Handle TEvPrivate::TEvOnRequestTimeout(2) 2026-01-10T12:22:19.904128Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1215: Invalid request info while on request timeout handle. RequestId: 2 ... waiting for SysViewsRoster update finished 2026-01-10T12:22:25.530978Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:25.531929Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:25.533849Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-10T12:22:25.542427Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:25.543215Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:25.543492Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:25.543569Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:25.544945Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:25.544979Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:25.888476Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:25.993340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:25.993477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:25.993949Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:25.994034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:26.038764Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:22:26.039690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:26.040021Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:26.098566Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:22:26.111768Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1490:2979] 2026-01-10T12:22:26.900989Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=NDY2NmExZTktN2NkNDQ4YTAtZDc2MWNlOTYtNTBhMDYwY2M=, workerId: [3:1491:2375], database: , longSession: 1, local sessions count: 1 2026-01-10T12:22:26.901279Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NDY2NmExZTktN2NkNDQ4YTAtZDc2MWNlOTYtNTBhMDYwY2M= 2026-01-10T12:22:26.902063Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=NDY2NmExZTktN2NkNDQ4YTAtZDc2MWNlOTYtNTBhMDYwY2M=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2026-01-10T12:22:26.902135Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2026-01-10T12:22:26.903347Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=NDY2NmExZTktN2NkNDQ4YTAtZDc2MWNlOTYtNTBhMDYwY2M=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1491:2375] 2026-01-10T12:22:26.903400Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2026-01-10T12:22:27.191608Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1492:2980], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:27.191716Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:27.192063Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1511:2983], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NO ... 2026-01-10T12:22:42.631374Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=MjQ4YWQxMzAtNDgxNDcxYWItNzg0MDNmMjItMjYwNGU1Mzg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 46, targetId: [7:7593707316524223653:2554] 2026-01-10T12:22:42.631403Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 46 timeout: 300.000000s actor id: [7:7593707316524223674:3033] 2026-01-10T12:22:42.639083Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 46, sender: [7:7593707316524223673:2561], selfId: [7:7593707282164483653:2248], source: [7:7593707316524223653:2554] 2026-01-10T12:22:42.639571Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7593707316524223650:2551], ActorId: [7:7593707316524223651:2552], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=7&id=MjQ4YWQxMzAtNDgxNDcxYWItNzg0MDNmMjItMjYwNGU1Mzg=, TxId: 2026-01-10T12:22:42.639676Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7593707316524223650:2551], ActorId: [7:7593707316524223651:2552], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=MjQ4YWQxMzAtNDgxNDcxYWItNzg0MDNmMjItMjYwNGU1Mzg=, TxId: 2026-01-10T12:22:42.639737Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7593707316524223650:2551], ActorId: [7:7593707316524223651:2552], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-10T12:22:42.639828Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [7:7593707316524223649:2550], ActorId: [7:7593707316524223650:2551], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7593707316524223651:2552] SUCCESS 2026-01-10T12:22:42.639956Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7593707316524223611:3009] ActorId: [7:7593707316524223612:3010] Database: /dc-1 ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-10T12:22:42.640017Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=MjQ4YWQxMzAtNDgxNDcxYWItNzg0MDNmMjItMjYwNGU1Mzg=, workerId: [7:7593707316524223653:2554], local sessions count: 1 2026-01-10T12:22:42.640017Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7593707316524223611:3009] ActorId: [7:7593707316524223612:3010] Database: /dc-1 ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44. Reply success 2026-01-10T12:22:42.647021Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekxqq6p550nep2yy5zg9ccf, Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ZGIyMjk2NWItNmIzZjkyMzgtNmYxYzgwNjMtNWQ2NTQ0NzU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 47, targetId: [7:7593707307934288890:2509] 2026-01-10T12:22:42.647072Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 47 timeout: 300.000000s actor id: [7:7593707316524223698:3041] 2026-01-10T12:22:43.176978Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekxqq6p550nep2yy5zg9ccf", Forwarded response to sender actor, requestId: 47, sender: [7:7593707316524223697:2566], selfId: [7:7593707282164483653:2248], source: [7:7593707307934288890:2509] 2026-01-10T12:22:43.178549Z node 7 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191037:3059], ActorId: [7:7593707320819191038:3060], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [7:7593707320819191039:3061] 2026-01-10T12:22:43.178552Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:833: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7593707320819191036:3058] ActorId: [7:7593707320819191037:3059] Database: /dc-1 ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44. Bootstrap. Start TLeaseUpdateRetryActor [7:7593707320819191038:3060] 2026-01-10T12:22:43.178591Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2026-01-10T12:22:43.178722Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976025946.372915s seconds to be completed 2026-01-10T12:22:43.180275Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, workerId: [7:7593707320819191041:2581], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:22:43.180458Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:22:43.180625Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:695: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2026-01-10T12:22:43.180733Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-10T12:22:43.181039Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 49, targetId: [7:7593707320819191041:2581] 2026-01-10T12:22:43.181072Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 49 timeout: 300.000000s actor id: [7:7593707320819191043:3062] 2026-01-10T12:22:43.394190Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 49, sender: [7:7593707320819191042:2582], selfId: [7:7593707282164483653:2248], source: [7:7593707320819191041:2581] 2026-01-10T12:22:43.394345Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, TxId: 01kekxqqxybfbsz55pqyz1qyfq 2026-01-10T12:22:43.394473Z node 7 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, TxId: 01kekxqqxybfbsz55pqyz1qyfq 2026-01-10T12:22:43.394539Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:432: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01kekxqqxybfbsz55pqyz1qyfq in session: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA= 2026-01-10T12:22:43.394586Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191037:3059], ActorId: [7:7593707320819191038:3060], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7593707320819191039:3061] NOT_FOUND 2026-01-10T12:22:43.394679Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:843: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7593707320819191036:3058] ActorId: [7:7593707320819191037:3059] Database: /dc-1 ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44. Lease update [7:7593707320819191039:3061] finished NOT_FOUND, issues: {
: Error: No such execution } 2026-01-10T12:22:43.396005Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 50, targetId: [7:7593707320819191041:2581] 2026-01-10T12:22:43.396032Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 50 timeout: 600.000000s actor id: [7:7593707320819191065:3072] 2026-01-10T12:22:43.396618Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 50, sender: [7:7593707320819191064:2589], selfId: [7:7593707282164483653:2248], source: [7:7593707320819191041:2581] 2026-01-10T12:22:43.396720Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:441: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7593707320819191038:3060], ActorId: [7:7593707320819191039:3061], TraceId: ExecutionId: afe1b8fe-5aac37b2-536b565a-b33e3f44, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2026-01-10T12:22:43.396953Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=MTg3ODU2MTQtOTk0OTNkMWItZjgwYjhkZmMtNDc0YWI4MjA=, workerId: [7:7593707320819191041:2581], local sessions count: 1 2026-01-10T12:22:43.403398Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=ZGIyMjk2NWItNmIzZjkyMzgtNmYxYzgwNjMtNWQ2NTQ0NzU=, workerId: [7:7593707307934288890:2509], local sessions count: 0 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy |93.2%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestControlPlaneAndMeteringData |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> DataStreams::TestNonChargeableUser >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:22:38.060969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:38.175873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:38.185643Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:38.186098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:38.186201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:38.485318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:38.485403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:38.491030Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047755876756 != 1768047755876759 2026-01-10T12:22:38.505761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:38.549520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:38.636048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:38.914005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:38.927632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:39.028506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:39.289728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:39.500682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:39.500823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:39.500901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:39.501920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:39.502284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:39.506357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:39.648475Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:39.730615Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-10T12:22:41.415867Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=M2I5NmE4YmYtMjFkYjEwNzctYzBjNDRhMjItMjY2ZWU5ZWQ=, ActorId: [1:1165:2960], ActorState: ExecuteState, LegacyTraceId: 01kekxqnyvf2g9hz9gz8v2q33f, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-10T12:22:44.033601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:44.040275Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:44.040495Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:44.040538Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:44.224725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:44.224842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:44.244142Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047761815143 != 1768047761815147 2026-01-10T12:22:44.248000Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:44.289955Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:44.356575Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:44.644463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:44.657922Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:44.759683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:44.983796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:45.222334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:45.222472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:45.222582Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:45.224057Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:45.224277Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:45.229989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:45.394152Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:45.429995Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-10T12:22:47.011188Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZGIwZjU1YjItOGIwODA1NmEtMTcyMTU1MWMtOWVjOTcwOQ==, ActorId: [2:1159:2962], ActorState: ExecuteState, LegacyTraceId: 01kekxqvcne40b9sqcvwd6k3e9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:44.795282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:44.967401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:44.995505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:44.996258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:44.996373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:45.594359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.594518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.599863Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047762369757 != 1768047762369760 2026-01-10T12:22:45.613424Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.662483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.765189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:46.081068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:46.095373Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:46.213983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query >> TDataShardTrace::TestTraceDistributedSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:22:45.149803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:45.273141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:45.281225Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:45.281682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:45.281784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:45.665298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.665443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.671870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047762383787 != 1768047762383790 2026-01-10T12:22:45.687890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.735410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.823886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:46.117088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:46.131283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:46.238760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:48.242361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.242491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.242560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.244265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.244428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.249021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:48.275257Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:48.422389Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:48.489036Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:45.050621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:45.171066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:45.180154Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:45.180668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:45.180781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:45.599614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.599763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.606434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047762369729 != 1768047762369732 2026-01-10T12:22:45.620134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.667241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.777571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:46.081230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:46.095372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:46.215358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:48.253405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.253550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.253630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.254622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.254772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.259855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:48.286665Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:48.435245Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:48.496834Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> VectorIndexBuildTest::PrefixedDuplicates [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true >> DataStreams::TestGetShardIterator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:45.347314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:45.447852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:45.456532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:45.456955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:45.457061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:45.832795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.832951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.839437Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047762369768 != 1768047762369771 2026-01-10T12:22:45.852224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.896666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.999008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:46.284633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:46.299652Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:46.415194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:48.393470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.393615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.393694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.394663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.394814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.399613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:48.426214Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:48.575376Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:48.643993Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:45.073741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:45.194088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:45.203005Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:45.203507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:45.203605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:45.606677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.606824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.613784Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047762369723 != 1768047762369726 2026-01-10T12:22:45.626949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.674272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.777061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:46.081320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:46.096101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:46.214006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:48.241871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.241988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.242046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.244123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.244252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:48.248747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:48.275217Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:48.420630Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:48.493568Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews >> TSchemeShardSysViewTest::EmptyName |93.2%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table >> TSchemeShardSysViewTest::AsyncCreateSameSysView >> TSchemeShardSysViewTest::DropSysView >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:41.938592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:42.020791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:42.026794Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:42.027049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:42.027120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:42.300635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:42.300749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:42.305485Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047759600141 != 1768047759600144 2026-01-10T12:22:42.315468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:42.361150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:42.457124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:42.725500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:42.739545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:42.841105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:42.875879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:42.876894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:42.877171Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:22:42.877407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:42.926979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:42.927709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:42.927820Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:42.929445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:22:42.929532Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:22:42.929583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:22:42.929917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:42.930031Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:42.930098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:22:42.940833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:42.969643Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:22:42.969827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:42.969921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:22:42.969958Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:22:42.970028Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:22:42.970074Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:22:42.970289Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:42.970369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:42.970689Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:22:42.970768Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:22:42.970931Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:22:42.970985Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:42.971025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:22:42.971061Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:22:42.971095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:22:42.971129Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:22:42.971182Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-10T12:22:42.971281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.971316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:42.971363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:22:42.971452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:22:42.971506Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:42.971605Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:22:42.971857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:22:42.971912Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:22:42.972024Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:22:42.972100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:22:42.972166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:22:42.972218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:22:42.972253Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:22:42.972561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:42.972599Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:22:42.972644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:22:42.972691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:22:42.972747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:22:42.972778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-10T12:22:42.972811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-10T12:22:42.972852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-10T12:22:42.972890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:42.974482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-10T12:22:42.974527Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:22:42.985266Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... amingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-10T12:22:51.814323Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-10T12:22:51.814396Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:22:51.814754Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1368:3096] 2026-01-10T12:22:51.814841Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1368:3096], channels: 1 2026-01-10T12:22:51.814947Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:22:51.815021Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1368:3096], trace_id# 2026-01-10T12:22:51.815106Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1364:3096] TxId: 281474976715672. 
Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1368:3096], trace_id# 2026-01-10T12:22:51.815167Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:22:51.816006Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1368:3096] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:22:51.816111Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1368:3096], trace_id# 2026-01-10T12:22:51.816183Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1368:3096], trace_id# 2026-01-10T12:22:51.816707Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1369:3096], Recipient [2:1285:3049]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-10T12:22:51.816852Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-10T12:22:51.816922Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v4000/18446744073709551615 2026-01-10T12:22:51.816971Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-10T12:22:51.817042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-10T12:22:51.817130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:51.817178Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-10T12:22:51.817222Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:22:51.817267Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:22:51.817322Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-10T12:22:51.817371Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:51.817400Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:22:51.817426Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-10T12:22:51.817452Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-10T12:22:51.817563Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-10T12:22:51.817784Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1369:3096], 0} after executionsCount# 1 2026-01-10T12:22:51.817848Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1369:3096], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:22:51.817934Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1369:3096], 0} finished in read 2026-01-10T12:22:51.818001Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:51.818034Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:22:51.818062Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:22:51.818104Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:22:51.818163Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-10T12:22:51.818188Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:22:51.818220Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-10T12:22:51.818282Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:22:51.819196Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1369:3096], Recipient [2:1285:3049]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:22:51.819271Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-10T12:22:51.819486Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1368:3096] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 818 Tasks { TaskId: 1 CpuTimeUs: 226 FinishTimeMs: 1768047771818 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 104 BuildCpuTimeUs: 122 HostName: "ghrun-tjszgfszoq" NodeId: 2 StartTimeMs: 1768047771818 CreateTimeMs: 1768047771815 UpdateTimeMs: 1768047771818 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:22:51.819573Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1368:3096] 2026-01-10T12:22:51.819735Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [2:1364:3096] TxId: 281474976715672. Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:22:51.819816Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [2:1364:3096] TxId: 281474976715672. 
Ctx: { TraceId: 01kekxr01gcqym0j6wyjty5kve, Database: , SessionId: ydb://session/3?node_id=2&id=ODA3MTJlMmYtOTZjNGI3ODItYjdkNzYwODQtYzcxMWQ4ODM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000818s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } } >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView >> TSchemeShardSysViewTest::EmptyName [GOOD] >> TSchemeShardSysViewTest::CreateSysView >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> TSchemeShardSysViewTest::DropSysView [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] >> TxUsage::WriteToTopic_Demo_47_Table >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:52.648801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:52.648893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.648942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:52.648983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:52.649020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:52.649050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:52.649155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.649240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:52.650099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:52.651410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:52.741728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:52.741786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:52.757559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:52.757674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:52.757891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:52.768723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:52.768995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:52.772986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:52.774616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:52.780451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.781554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:52.788049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:52.788292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:52.788353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:52.788698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:52.922514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.924995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.925071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.925123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
678944 2026-01-10T12:22:53.582674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:22:53.582714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:22:53.584631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.584699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:22:53.584749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:22:53.586401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.586452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.586501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:22:53.586573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:22:53.586698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:22:53.588246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:22:53.588395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:22:53.588742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:53.588872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:53.588928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:22:53.589226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:22:53.589285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:22:53.589481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:22:53.589554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:53.591410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:53.591470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:53.591645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.591705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:22:53.592006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.592058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:22:53.592174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:22:53.592214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:22:53.592255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:22:53.592294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:22:53.592331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:22:53.592385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:22:53.592432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:22:53.592463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:22:53.592539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:22:53.592577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:22:53.592617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:22:53.593233Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:22:53.593326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:22:53.593378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:22:53.593427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:22:53.593470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:22:53.593555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:22:53.596493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:22:53.596923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:22:53.597502Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:680:2670] Bootstrap 2026-01-10T12:22:53.598677Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:680:2670] Become StateWork (SchemeCache [1:685:2675]) 2026-01-10T12:22:53.601653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:22:53.601815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0 2026-01-10T12:22:53.601897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0, sysViewDescription: Name: "" Type: EPartitionStats 2026-01-10T12:22:53.602012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-10T12:22:53.606040Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:680:2670] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:22:53.609801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:53.610094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/ 2026-01-10T12:22:53.610574Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:22:53.610794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:22:53.610854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:22:53.611239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:22:53.611328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:22:53.611365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:695:2685] TestWaitNotification: OK eventTxId 101 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:52.648774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:52.648878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.648924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:52.648967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:52.649003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:52.649032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:52.649114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.649202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:52.650016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:52.651359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:52.740942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:52.741000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:52.756143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:52.756238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:52.757901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:52.768363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:52.768655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:52.772995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:52.774627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:52.780641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.781524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:52.788013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:52.788264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:52.788389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:52.788648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:52.961607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.965967Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.966998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.967108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.967189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
views_update.cpp:213: SysViewsRosterUpdate# [1:824:2800] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_ds_pdisks' 2026-01-10T12:22:53.803805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-10T12:22:53.803917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-10T12:22:53.803951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-10T12:22:53.803993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 43 2026-01-10T12:22:53.804031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 39 2026-01-10T12:22:53.804458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-10T12:22:53.804580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-10T12:22:53.804616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-10T12:22:53.804662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:22:53.804697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:22:53.804759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720658, subscribers: 1 2026-01-10T12:22:53.804807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:824:2800] 2026-01-10T12:22:53.807858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2026-01-10T12:22:53.808653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 
2026-01-10T12:22:53.808846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:824:2800] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_sys_view' 2026-01-10T12:22:53.808896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:824:2800] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:766:2753] sender: [1:870:2058] recipient: [1:15:2062] 2026-01-10T12:22:53.873518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:53.873798Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 299us result status StatusSuccess 2026-01-10T12:22:53.874177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:53.874928Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:53.875143Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 236us result status 
StatusPathDoesNotExist 2026-01-10T12:22:53.875327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000042, drop txId: 281474976720658" Path: "/MyRoot/.sys/new_sys_view" PathId: 38 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:53.875925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:53.876386Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_ds_pdisks" took 412us result status StatusPathDoesNotExist 2026-01-10T12:22:53.876558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_ds_pdisks\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976720657" Path: "/MyRoot/.sys/new_ds_pdisks" PathId: 39 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:53.877214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:53.877430Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_partition_stats" took 236us result status StatusSuccess 2026-01-10T12:22:53.877761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_partition_stats" PathDescription { Self { Name: "new_partition_stats" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:53.199658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:53.199737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.199768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:53.199808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:53.199833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:53.199857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:53.199931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.199998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:53.200794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:53.201033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:53.291543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:53.291623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:53.310522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:53.310678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:53.310914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:53.319297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:53.319523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:53.320227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:53.320467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:53.322611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.322782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:53.323951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:53.324005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.324064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:53.324123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:53.324182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:53.324431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.461601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:22:53.465673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.465877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.465986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.466858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
n for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-10T12:22:54.188305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-10T12:22:54.188334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-10T12:22:54.188369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-10T12:22:54.188412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-10T12:22:54.188436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-10T12:22:54.188471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-10T12:22:54.188494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-10T12:22:54.188515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-10T12:22:54.188562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-10T12:22:54.188587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-10T12:22:54.188630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-10T12:22:54.188662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-10T12:22:54.188683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-10T12:22:54.188706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-10T12:22:54.188728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-10T12:22:54.188748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-10T12:22:54.188769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-10T12:22:54.188791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-10T12:22:54.188824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 
2026-01-10T12:22:54.188870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:22:54.188893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:22:54.188914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:22:54.188936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:22:54.188971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:22:54.188999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:22:54.189020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:22:54.189042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:22:54.189067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:22:54.189090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:22:54.189257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.189387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.189460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.189558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.189648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.189801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.190115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.190231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.190627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.190700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 
2026-01-10T12:22:54.190920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.191882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.192689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.197307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:54.202588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:54.202669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:54.202749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:54.202798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:54.202834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:54.205022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:743:2730] sender: [1:805:2058] recipient: [1:15:2062] 2026-01-10T12:22:54.276002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:54.276288Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 308us result status StatusPathDoesNotExist 2026-01-10T12:22:54.276502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:52.648759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:52.648845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.648883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:52.648917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:52.648943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:52.648976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:52.649032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:52.649083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:52.649697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:52.651386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:52.742200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:52.742267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:52.757946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:52.758035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:52.758212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:52.767944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:52.768363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:52.772975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:52.774690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:52.780625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.781596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:52.788066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:52.788246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:52.788309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:52.788375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:52.788709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:52.935995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.937937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.938020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.938106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:52.938178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.255742Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:22:54.255810Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:54.255836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:54.255865Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:54.255889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:54.255917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:22:54.255957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:54.255984Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:22:54.256009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:22:54.256064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-10T12:22:54.256112Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:22:54.256137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2026-01-10T12:22:54.256162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-10T12:22:54.256807Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:54.256885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:54.256916Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:54.256946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2026-01-10T12:22:54.256976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:22:54.257985Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:54.258061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:54.258091Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:54.258119Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-10T12:22:54.258152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-10T12:22:54.258223Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:22:54.259977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:22:54.261051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-10T12:22:54.261322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:22:54.261371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-10T12:22:54.261455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:22:54.261476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:22:54.261911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:22:54.262039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:22:54.262077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:330:2319] 2026-01-10T12:22:54.262231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:22:54.262305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:22:54.262327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:330:2319] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-10T12:22:54.262689Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:54.262896Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 235us result status StatusSuccess 2026-01-10T12:22:54.268604Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:54.269065Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:54.269234Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 182us result status StatusSuccess 2026-01-10T12:22:54.269438Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [GOOD] >> TSchemeShardSysViewTest::CreateSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:53.578983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:53.579079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.579136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:53.579178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:53.579213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:53.579255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:53.579327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.579403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:53.580145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:53.580402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:53.670649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:53.670706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:53.685414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:53.685523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:53.685709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:53.694540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:53.694767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:53.695421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:53.695628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:53.697817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.697986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:53.699137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:53.699194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.699253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:53.699293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:53.699347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:53.699567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.832572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:22:53.833517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.833966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.834004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.834062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.834101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.834158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.834223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720657, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:22:54.582926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:54.582951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:774:2750], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 2 2026-01-10T12:22:54.582987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:774:2750], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 38 2026-01-10T12:22:54.583068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.583107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720657:0 ProgressState 2026-01-10T12:22:54.583189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-10T12:22:54.583238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-10T12:22:54.583274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-10T12:22:54.583301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-10T12:22:54.583338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2026-01-10T12:22:54.583377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-10T12:22:54.583413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976720657:0 2026-01-10T12:22:54.583443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976720657:0 2026-01-10T12:22:54.583497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:22:54.583526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976720657, publications: 2, subscribers: 1 2026-01-10T12:22:54.583554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-10T12:22:54.583581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:22:54.585800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.585915Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.585960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-10T12:22:54.586006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-10T12:22:54.586072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-10T12:22:54.586754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.586833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.586855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-10T12:22:54.586878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:22:54.586910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:22:54.586964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720657, subscribers: 1 2026-01-10T12:22:54.587001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:780:2756] 2026-01-10T12:22:54.590320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.590870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-10T12:22:54.590999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:780:2756] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/ds_pdisks' 2026-01-10T12:22:54.591054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:780:2756] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:722:2709] sender: [1:808:2058] recipient: [1:15:2062] 
2026-01-10T12:22:54.655421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:54.655679Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 261us result status StatusSuccess 2026-01-10T12:22:54.656102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:54.656801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:54.657006Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 215us result status StatusSuccess 2026-01-10T12:22:54.657310Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976720657 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] >> TSchemeShardSysViewTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:53.127264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:53.127358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.127392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:53.127432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:53.127464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:53.127518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:53.127577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:53.127652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:53.128454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:53.128727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:53.213859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:53.213921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:53.229083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:53.229207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:53.229394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:53.239660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:53.239906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:53.240627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:53.240907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:53.243044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.243230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:53.244422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:53.244495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:53.244581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:53.244631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:53.244671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:53.244895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:53.387042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:22:53.388033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.388926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.389003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.389085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:53.389173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:55.060753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:55.060805Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-10T12:22:55.060956Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:22:55.061141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-10T12:22:55.061211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:22:55.063243Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:55.063294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:22:55.063526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:22:55.063633Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:55.063665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-10T12:22:55.063705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:22:55.063958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.064009Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:22:55.064122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:55.064151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:55.064184Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:55.064211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:55.064240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready 
parts: 1/1, is published: false 2026-01-10T12:22:55.064271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:55.064299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:22:55.064326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:22:55.064390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:22:55.064424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:22:55.064478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-10T12:22:55.064504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:22:55.065072Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:55.065146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:55.065181Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:55.065217Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-10T12:22:55.065253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-10T12:22:55.065785Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:55.065842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:55.065868Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:55.065888Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:22:55.065915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:22:55.065977Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:22:55.066743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:22:55.066809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:22:55.066917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-10T12:22:55.069258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:22:55.070895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:22:55.071016Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2026-01-10T12:22:55.071315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:22:55.071363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-10T12:22:55.071482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:22:55.071509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:22:55.071964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:22:55.072165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:22:55.072246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:22:55.072286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:736:2725] 2026-01-10T12:22:55.072462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:22:55.072491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:736:2725] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-10T12:22:55.073023Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:55.073280Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 292us result status StatusPathDoesNotExist 2026-01-10T12:22:55.073493Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:54.342648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:54.342747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:54.342784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:54.342830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:54.342865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:54.342895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:54.343006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:54.343064Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:54.343847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:54.344188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:54.423362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:54.423423Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:54.435655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:54.435768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:54.435939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:54.444363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:54.444595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:54.445158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:54.445320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:54.447016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:54.447140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:54.448164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:54.448208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:54.448286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:54.448317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:54.448347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:54.448565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:54.585043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.586911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.587002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:54.587074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-01-10T12:22:55.323084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-10T12:22:55.323097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-10T12:22:55.323115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-10T12:22:55.323132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-10T12:22:55.323145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-10T12:22:55.323159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-10T12:22:55.323184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-10T12:22:55.323199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-10T12:22:55.323211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-10T12:22:55.323237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-10T12:22:55.323259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-10T12:22:55.323277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-10T12:22:55.323290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-10T12:22:55.323302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:22:55.323313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:22:55.323327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:22:55.323339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:22:55.323354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:22:55.323369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:22:55.323384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:22:55.323396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:22:55.323421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:22:55.323436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:22:55.323449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-10T12:22:55.323573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.323662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.323724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.323798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.323856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.323940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.324946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-10T12:22:55.325418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.325911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:22:55.332307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:55.335782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:55.335837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:55.335879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:55.335925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:55.335961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:55.336097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:719:2706] sender: [1:779:2058] recipient: [1:15:2062] 2026-01-10T12:22:55.405301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:55.405619Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 309us result status StatusSuccess 2026-01-10T12:22:55.405964Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:27.063300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:27.188919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:27.197769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:27.198230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:27.198317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:27.659059Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:27.776563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:27.776698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.812516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:27.909734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:28.596613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:28.597743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:28.597779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:28.597800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:28.597902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:28.662599Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:29.161191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:32.110849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:32.119757Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:32.122994Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:32.160934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:32.161062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:32.191013Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:32.193316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:32.371595Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:32.371685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:32.372976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.373407Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.373993Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.374848Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.375088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.375261Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.375376Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.375517Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.375646Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:32.390906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:32.627660Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:32.672765Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:32.672875Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:32.709086Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:32.710606Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:32.710824Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:32.710880Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:32.710936Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:32.711071Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:32.711129Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:32.711182Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:32.711825Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:32.734025Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:32.734161Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:32.758638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:32.759595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:32.816270Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:32.819337Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:32.834032Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:32.834109Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:32.834178Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:32.843327Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:32.863660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:32.885943Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:32.886081Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:32.894966Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:33.091420Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:33.103989Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:33.423396Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:33.563558Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:33.563644Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:34.260534Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... chemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:43.738568Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:45.237032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:21:45.237087Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:55.281745Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:21:55.281840Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:21:55.281883Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:21:55.281923Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:21:57.087140Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:21:57.087207Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:21:57.087394Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:21:57.100146Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:21:57.433612Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:21:57.437813Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:21:57.448020Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:21:57.448187Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], Start read next stream part 2026-01-10T12:21:57.467611Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4438:3995], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:57.467705Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4449:4000], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:57.467810Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:57.468407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4452:4003], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:57.468561Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:57.472817Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4470:4009], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:57.475066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:57.514576Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4453:4004], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:21:57.627594Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4538:4053], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:57.668232Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4537:4052] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:57.875531Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4559:4066]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:21:57.875782Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:21:57.875879Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4561:4068] 2026-01-10T12:21:57.875941Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4561:4068] 2026-01-10T12:21:57.876306Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4562:4069] 2026-01-10T12:21:57.876440Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4561:4068], server id = [2:4562:4069], tablet id = 72075186224037894, status = OK 2026-01-10T12:21:57.876489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4562:4069], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:21:57.876542Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:21:57.876642Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:21:57.876722Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4559:4066], StatRequests.size() = 1 2026-01-10T12:21:57.876788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:22:54.287453Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpb2823ycyf32x769xn56", SessionId: ydb://session/3?node_id=2&id=OTY2OWU3M2ItYzI4OTRhNDktNjA3YjM3MjktMjc1OGZmMTM=, Slow query, duration: 56.822550s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:22:54.288689Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:54.288883Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], Start read next stream part 2026-01-10T12:22:54.289076Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-10T12:22:54.289340Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4681:4181], 
ActorId: [2:4682:4182], Starting query actor #1 [2:4683:4183] 2026-01-10T12:22:54.289390Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4682:4182], ActorId: [2:4683:4183], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:22:54.291892Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4682:4182], ActorId: [2:4683:4183], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmM5NTExZjctNTc1NTFmY2ItZmEwNjg1YmQtZDE1YmEzZTg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:22:54.292487Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:54.292549Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4415:2462], ActorId: [2:4425:3989], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmRhZjk5ZDItNzYzMzA3MjMtOWYwNTk2M2EtYmEwYWQzMGE=, TxId: 2026-01-10T12:22:54.376572Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4700:4197]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:54.376761Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:54.376795Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4700:4197], StatRequests.size() = 1 2026-01-10T12:22:54.519946Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4682:4182], ActorId: [2:4683:4183], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmM5NTExZjctNTc1NTFmY2ItZmEwNjg1YmQtZDE1YmEzZTg=, TxId: 2026-01-10T12:22:54.520029Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4682:4182], ActorId: [2:4683:4183], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmM5NTExZjctNTc1NTFmY2ItZmEwNjg1YmQtZDE1YmEzZTg=, TxId: 2026-01-10T12:22:54.520405Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4681:4181], ActorId: [2:4682:4182], Got response [2:4683:4183] SUCCESS 2026-01-10T12:22:54.520713Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:22:54.534957Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:22:54.535059Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3292:3444] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> TSchemeShardSysViewTest::CreateExistingSysView >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:56.194410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:56.194509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:56.194554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:56.194625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:56.194665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:56.194698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:56.194806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:56.194899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:56.195777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:56.196115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:56.288594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:56.288666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:56.301714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:56.301834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:56.301987Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:56.309220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:56.309463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:56.310094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:56.310321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:56.312299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:56.312465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:56.313523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:56.313573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:56.313622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:56.313661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:56.313694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:56.313921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:56.438597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.442547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.442767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:22:56.442870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.442978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:56.443833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:22:57.432468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-10T12:22:57.432692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-10T12:22:57.433525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:57.433666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:22:57.433728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-10T12:22:57.433895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:22:57.434127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-10T12:22:57.434224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:22:57.436680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:57.436729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:22:57.437050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:22:57.437223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:57.437277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:822:2780], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-10T12:22:57.437347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:822:2780], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:22:57.437817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-10T12:22:57.437890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:22:57.438010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:57.438051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:57.438093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:22:57.438128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:57.438181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:22:57.438224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:22:57.438267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:22:57.438303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:22:57.438386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:22:57.438430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:22:57.438477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2026-01-10T12:22:57.438523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:22:57.439287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:57.439393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:57.439456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:57.439503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-10T12:22:57.439546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-10T12:22:57.440325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, 
cookie: 102 2026-01-10T12:22:57.440426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:22:57.440458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:22:57.440489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:22:57.440519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:22:57.440582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:22:57.443646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:22:57.443751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:22:57.444009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:22:57.444067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:22:57.444556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:22:57.444661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:22:57.444699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:849:2805] TestWaitNotification: OK eventTxId 102 2026-01-10T12:22:57.445317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:57.445567Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 259us result status StatusSuccess 2026-01-10T12:22:57.445940Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> TxUsage::WriteToTopic_Demo_27_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:22:37.066930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:37.161551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:22:37.167741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:37.168035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:37.168147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:37.453163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:37.453300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:37.459904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047754358316 != 1768047754358319 2026-01-10T12:22:37.472314Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:37.518229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:37.609269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:37.903152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:37.916408Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:38.016227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:38.284510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-10T12:22:38.501280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.501415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.501488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.502536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.502802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:38.505948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:38.649451Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:38.730559Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } ... waiting for SysViewsRoster update finished 2026-01-10T12:22:43.254577Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:22:43.261724Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:22:43.261947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:22:43.261990Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:22:43.467486Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:43.467597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:43.488811Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047760594706 != 1768047760594710 2026-01-10T12:22:43.492239Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:43.534388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:43.602086Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:22:43.918053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:43.932322Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:44.034178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:44.260226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:44.478477Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:44.478596Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:44.478678Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:44.479674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:44.479757Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:44.484564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:44.630381Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:22:44.666885Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for at least 2 blocked commits 2026-01-10T12:22:47.153875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:22:47.153941Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037889 ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> TxUsage::WriteToTopic_Demo_38_Table >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TSchemeShardLoginTest::UserLogin >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TWebLoginService::AuditLogLoginSuccess >> TWebLoginService::AuditLogEmptySIDsLoginSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:57.874942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:57.875041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:57.875072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:57.875105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:57.875128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:57.875149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:57.875202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:57.875255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:57.875887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:57.876128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:57.945177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:57.945239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:57.959823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:57.959937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:57.960137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:57.968092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:57.968317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:57.968978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:57.969191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:57.971276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:57.971443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:57.972611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:57.972653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:57.972694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:57.972725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:57.972760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:57.972944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:22:58.084217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.085989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.086071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:22:58.086117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-10T12:22:58.627046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:22:58.627176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:22:58.627221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:22:58.627267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-10T12:22:58.627335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-10T12:22:58.627875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:22:58.627961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:22:58.627989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:22:58.628014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:22:58.628044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:22:58.628125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:22:58.631281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:22:58.631353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:22:58.631505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:22:58.631546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:22:58.631818Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:22:58.631890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:22:58.631917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:704:2694] TestWaitNotification: OK eventTxId 101 2026-01-10T12:22:58.632358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:58.632619Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 243us result status StatusSuccess 2026-01-10T12:22:58.632904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-10T12:22:58.635881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:22:58.636043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0 2026-01-10T12:22:58.636107Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0, sysViewDescription: Name: "new_sys_view" Type: ENodes 2026-01-10T12:22:58.636285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-10T12:22:58.638477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-10T12:22:58.638737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:22:58.638959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:22:58.639000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:22:58.639307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:22:58.639392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:22:58.639426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:712:2702] TestWaitNotification: OK eventTxId 102 2026-01-10T12:22:58.639945Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:22:58.640190Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 325us result status StatusSuccess 2026-01-10T12:22:58.640521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq [GOOD] >> IndexBuildTest::RejectsCancel >> DataStreams::TestShardPagination [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2026-01-10T12:22:44.906764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707327035864921:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.907212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.185937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.225080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.225199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.258992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.403725Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.423339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.559627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.559662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:22:45.559671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.559742Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.919010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.923520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.989174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.151106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:46.391323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:46.556426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:49.214982Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707346713764430:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:49.218101Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:49.253603Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.342369Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.342474Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.348718Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.363472Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:22:49.452963Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:49.452996Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:49.453004Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:49.453089Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.545441Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:49.717126Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.819345Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:50.035602Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:50.222318Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: 
"shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2026-01-10T12:22:51.296134Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:52.454979Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:52.530338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:52.613531Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:52.771705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:54.382759Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707369583816048:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:54.382821Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:54.423548Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:54.538016Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:54.550753Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:54.550851Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:54.566925Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:54.621881Z node 7 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:54.636967Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:54.636992Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:54.636999Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:54.637069Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:54.919277Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:55.009195Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:55.210423Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:55.221377Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-10T12:22:55.387347Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> TSchemeShardLoginFinalize::NoPublicKeys |93.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLogin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2026-01-10T12:22:44.916696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707325519863221:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.916776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.200182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.233352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.233456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.273306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.363694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.376162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.559055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.559100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:45.559108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.559202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.910438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.925364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.972722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.128393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2026-01-10T12:22:46.429972Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707334109799918:3571] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:49.683671Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707346580634477:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:49.684235Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:49.714420Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.855977Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.865590Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.865670Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.901746Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:50.018011Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:50.031006Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:50.031027Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:50.031060Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:50.031150Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:50.290923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:50.350153Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:50.521427Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:50.733016Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:55.203846Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707372257912277:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:55.203926Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:55.232137Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:55.348249Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:55.376598Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:55.376687Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:55.407528Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:55.473985Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:55.480425Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:55.480466Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:55.480474Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:55.480559Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:55.723069Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:55.775129Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:55.951284Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:56.216694Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> TSchemeShardLoginFinalize::NoPublicKeys [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> DataStreams::TestUnsupported [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TWebLoginService::AuditLogCreateModifyUser >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLogout >> TSchemeShardLoginFinalize::InvalidPassword [GOOD] >> TSchemeShardLoginFinalize::Success >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TWebLoginService::AuditLogLogout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 
2026-01-10T12:22:46.375923Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707333199743873:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:46.377776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:46.572944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:46.597059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:46.597180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:46.635818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:46.681494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:46.759804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:46.763588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:46.763630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:46.763644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:46.763817Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:46.966395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:47.010877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:47.140021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:47.264981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:47.386041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:50.250353Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707350959842701:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:50.250420Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:50.269530Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:50.353533Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:50.372164Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:50.372237Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:50.392586Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:50.430197Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005780s 2026-01-10T12:22:50.456167Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:50.465837Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:50.465859Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:50.465867Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:50.465938Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:50.646980Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:50.764118Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:50.939189Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 
2026-01-10T12:22:51.096743Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:51.142615Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-01-10T12:22:51.255062Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } 
records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } records { sequence_number: "68" shard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: 
"103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2026-01-10T12:22:55.251081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707350959842701:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:55.251320Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } 2026-01-10T12:22:58.570042Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707385185955196:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:58.570734Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:58.583457Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:58.664594Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:58.682299Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:58.682384Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:58.699469Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:58.759439Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:58.759459Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:58.759467Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:58.759541Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:58.816975Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:59.013793Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:59.077567Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:59.297795Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: 
[1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.958196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.958263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.974289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.974392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.974561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.982751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.982970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.983485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.983770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2026-01-10T12:22:59.989603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.996014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.996050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.996114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.004759Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:23:00.166960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:23:00.167233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.167496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:23:00.167547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:23:00.167783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:00.167863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:00.170548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:00.170822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:23:00.171039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.171125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:23:00.171172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:23:00.171216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:23:00.173673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.173774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:23:00.173827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:23:00.175874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.175940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.176006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.176066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:23:00.180457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:23:00.182776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:23:00.183011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:23:00.184322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:00.184491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:00.184542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.184894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:23:00.184975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.185172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:00.185287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:23:00.187770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:00.187850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ration: MODIFY USER, path: /MyRoot 2026-01-10T12:23:02.994794Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:02.994833Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:02.994985Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:02.995032Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-10T12:23:02.995529Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:23:02.995628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:23:02.995671Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:23:02.995724Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:23:02.995790Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:02.995884Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-10T12:23:02.997333Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2026-01-10T12:23:02.930742Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T12:23:02.949307Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-10T12:23:02.965883Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-10T12:23:02.974462Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-10T12:23:02.982996Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-10T12:23:02.992026Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2026-01-10T12:23:02.992026Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-10T12:23:03.000463Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:23:03.005233Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:03.005345Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:23:03.005399Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:03.005441Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:23:03.005476Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:03.005525Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:23:03.005586Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-10T12:23:03.005621Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:03.005656Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-10T12:23:03.005689Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-10T12:23:03.005723Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:23:03.007528Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:03.007635Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-10T12:23:03.007785Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:03.007826Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:03.007966Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:03.008005Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-10T12:23:03.008482Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:23:03.008572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:23:03.008608Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-10T12:23:03.008648Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:23:03.008712Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:03.008790Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-10T12:23:03.010195Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2026-01-10T12:23:02.930742Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T12:23:02.949307Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-10T12:23:02.965883Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-10T12:23:02.974462Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-10T12:23:02.982996Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-10T12:23:02.992026Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-10T12:23:03.005134Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2026-01-10T12:23:03.005134Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TCmsTest::ManualRequestApprovalLockingAllNodes >> TCmsTest::RequestRestartServicesOk >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2026-01-10T12:22:51.105321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707354671135958:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:51.108357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:51.334582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:51.385312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:51.385655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:51.434458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:51.448125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:51.529362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:51.529398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:51.529439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:51.529554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:51.592348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:51.739225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:51.827312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:51.985362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:52.119152Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:55.218767Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707373495572816:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:55.218888Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:55.240617Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:55.322724Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:55.338430Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:55.338486Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:55.360310Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:55.404762Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:55.412502Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:55.412528Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:55.412538Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:55.412610Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:55.603796Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:55.731321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:55.936610Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:55.949051Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-10T12:22:56.082500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:56.103519Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7593707377790541730:3201], for# user2@builtin, access# DescribeSchema 2026-01-10T12:22:56.110884Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7593707377790541733:3202], for# user2@builtin, access# DescribeSchema 2026-01-10T12:22:56.120925Z 
node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:56.226542Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:59.186148Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707392292698450:2271];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:59.186247Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:59.192027Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:59.293299Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.309605Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:59.309673Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:59.331622Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:59.414203Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:59.414230Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:59.414239Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:59.414317Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:59.470771Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:59.654061Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:59.705012Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:59.909228Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 2026-01-10T12:23:00.201081Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.950914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.950970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.965864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2026-01-10T12:22:59.965964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.967043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.980555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.980817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.981594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.983790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.003318Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:23:00.126698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:23:00.127928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.129156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:23:00.129224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, 
LocalPathId: 1] source path: 2026-01-10T12:23:00.130633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:00.130763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:00.134284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:00.135378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:23:00.135629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.135830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:23:00.135879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:23:00.135934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:23:00.138409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.138482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:23:00.138538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:23:00.140522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.140576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.140630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.140682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:23:00.150053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:23:00.152260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg 
type: 269090816 2026-01-10T12:23:00.153402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:23:00.154551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:00.154680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:00.154719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.155060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:23:00.155142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:23:00.155329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:00.156117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:23:00.158594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:00.158638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:03.182675Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:23:03.182724Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:03.182774Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:23:03.182819Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:03.182893Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:23:03.182966Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-10T12:23:03.183014Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:03.183064Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:23:03.183109Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2026-01-10T12:23:03.183152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2026-01-10T12:23:03.184102Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:23:03.186315Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:03.186419Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-10T12:23:03.186629Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:03.186683Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:03.186860Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:03.186915Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:23:03.188157Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:03.188325Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:03.188382Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:23:03.188429Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2026-01-10T12:23:03.188484Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:03.188587Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:23:03.189165Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-10T12:23:03.190436Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-10T12:23:03.190860Z node 5 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2026-01-10T12:23:03.192860Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:03.192917Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:03.350635Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:03.356191Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:03.356329Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:03.356377Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:03.357431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-10T12:23:03.357503Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:03.357553Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:03.358304Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:03.359080Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:03.359268Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 216us result status StatusSuccess 2026-01-10T12:23:03.359790Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5opJjwlMe8GwOA2JoZ6z\ndYHDU099Y6cTUItksNEMp6MmdpIqvr15l6+vqqncYdCMVMYk3hBhtgc4C6ih/KGs\nhE8CSLFG+knXM59V0YlMY3Mj0EfMzenFMdHl9jzuQPRRfPqKZRRFnTykQU8RMncO\n0ANfYjK+li3xlWjIXPQFXBKNxbm0YarQfd/xyI5Fy+UUdte7YLSqJZWIZZKSlxOd\nbXDf+FNJzXT5mnqKIomjYzMEpn22Xwe/0J2yvF3iljPjB36lWiB1ttgXhrZOq1Ei\nzcU5ZrGZX6WVWYEDvVIi34CWoXa6zqT2fbPMr7pU53Wyt2UYsRy1imulkP8nsyUU\nnQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134183344 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:03.360310Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-10T12:23:03.360377Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: No ydb_session_id cookie 2026-01-10T12:23:03.362538Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-10T12:23:03.367736Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (589A015B): Token is not in correct format 2026-01-10T12:23:03.367873Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format 2026-01-10T12:23:03.368397Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2026-01-10T12:23:03.155858Z: 
component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-10T12:23:03.182363Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-10T12:23:03.353378Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY4MDkwOTgzLCJpYXQiOjE3NjgwNDc3ODMsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2026-01-10T12:23:03.369734Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY4MDkwOTgzLCJpYXQiOjE3NjgwNDc3ODMsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2026-01-10T12:23:03.369734Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY4MDkwOTgzLCJpYXQiOjE3NjgwNDc3ODMsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> DataStreams::TestPutRecords [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> TCmsTest::CollectInfo >> IndexBuildTest::RejectsCancel [GOOD] >> IndexBuildTest::NullsAreUniq >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> TCmsTenatsTest::TestClusterLimit >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.960298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.960387Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.975649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.975725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.975849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.984377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.984619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.985150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.985337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995389Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.193726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.194803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.194932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.195947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 12:23:05.010319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:23:05.010349Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:23:05.010380Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:23:05.010415Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:05.010485Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:23:05.013002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:23:05.013382Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:05.013757Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:676:2665] Bootstrap 2026-01-10T12:23:05.015073Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:676:2665] Become StateWork (SchemeCache [5:681:2670]) 2026-01-10T12:23:05.015441Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:05.015604Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess 2026-01-10T12:23:05.016121Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:05.016778Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:676:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:23:05.018567Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 2026-01-10T12:23:05.019445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:05.019496Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:05.068425Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 2026-01-10T12:23:05.068543Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:05.068590Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.068748Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:05.068785Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:05.069199Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:05.069577Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:05.069736Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 210us result status StatusSuccess 2026-01-10T12:23:05.070108Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAudf3vg796gPNlqRINMsI\ndZf2UDikZUrwg5unWhsfeitnLjYz2Igzw1XrZ0hd669lldl6FPjGogY9sCBUJLV2\ndO8Na24U/esifl/I8MyzCQIazvXbZBVrZc2KNYM63kkhxONUGn5HZghkfOB6Os84\nnR7AGaBFja95fH9aFFGc0OsBbrCz9dJizCf2olPqYtOt4rZ7Mw43HoeI5sit+u9a\nL4Xa3h9f7vJ9J3A9rUEQjaJ3JQG5550ur866byorGr/UViW9jv6BLFEHRHvmlBsc\nzYJylgTkwP2gaKNcy6HyWVZ1w/d7uLXUhnbLhN6a1vWyL+v3bchnXIUYHYaAhZdu\nnQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134185066 } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2026-01-10T12:22:44.838451Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707327789092788:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.838496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.056228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.184339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.184498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.243406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.328225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.344066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.558990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.559103Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:45.559114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.559214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.843121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.910689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.972850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.139530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:48.899340Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707344865351294:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:48.899376Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:48.921735Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.015989Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.033880Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.033963Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.056717Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:49.123429Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:49.123453Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:49.123460Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:49.123539Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.181155Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:49.298978Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.446477Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:49.662314Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:49.856433Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:49.908887Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:49.916244Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { 
sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2026-01-10T12:22:49.987904Z :INFO: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] Starting read session 2026-01-10T12:22:49.988946Z :DEBUG: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] Starting session to cluster null (localhost:3097) 2026-01-10T12:22:50.000824Z :DEBUG: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:22:50.000885Z :DEBUG: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:22:50.001060Z :DEBUG: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] [null] Reconnecting session to cluster null in 0.000000s 2026-01-10T12:22:50.014266Z :DEBUG: [/Root/] [/Root/] [d098034e-e3503919-155f59f0-48a9a43a] [null] Successfully connected. Initializing session 2026-01-10T12:22:50.014687Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2026-01-10T12:22:50.014712Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 1 2026-01-10T12:22:50.037219Z node 4 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-01-10T12:22:50.041844Z node 4 :PQ_READ_PROXY INFO: read_session_actor.cpp:941: session cookie 1 consumer user1 session user1_4_1_6487205168999588217_v1 read init: from# ipv6:[::1]:57536, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-01-10T12:22:50.042269Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer user1 session user1_4_1_6487205168999588217_v1 auth for : user1 2026-01-10T12:22:50.045767Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 1 consumer user1 session user1_4_1_6487205168999588217_v1 Handle describe topics response 2026-01-10T12:22:50.045961Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer user1 session user1_4_1_6487205168999588217_v1 auth is DEAD 2026-01-10T12:22:50.047240Z node 4 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 1 consumer user1 session user1_4_1_6487205168999588217_v1 auth ok: topics# 1, initDone# 0 2026-01-10T12:22:50.048629Z node 4 :PQ_READ_PROXY INFO: read_session_actor.cp ... ntReadOffset 0 2026-01-10T12:22:58.737922Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:22:58.738132Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2026-01-10T12:22:58.738313Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-10T12:22:58.738375Z :DEBUG: [/Root/] Take Data. Partition 0. 
Read: {1, 0} (1-1) 2026-01-10T12:22:58.738427Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-10T12:22:58.824689Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:22:58.824909Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2026-01-10T12:22:58.824973Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2026-01-10T12:22:58.825032Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2026-01-10T12:22:58.825072Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2026-01-10T12:22:58.825091Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-10T12:22:58.825115Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2026-01-10T12:22:58.825165Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2026-01-10T12:22:58.825178Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2026-01-10T12:22:58.825213Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2026-01-10T12:22:58.825216Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2026-01-10T12:22:58.825236Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2026-01-10T12:22:58.825344Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2026-01-10T12:22:58.825402Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2026-01-10T12:22:58.825439Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-10T12:22:58.825626Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2026-01-10T12:22:58.825660Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2026-01-10T12:22:58.826559Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2026-01-10T12:22:58.826597Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2026-01-10T12:22:58.826627Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2026-01-10T12:22:58.827277Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2026-01-10T12:22:58.828206Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2026-01-10T12:22:58.830150Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2026-01-10T12:22:58.831172Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2026-01-10T12:22:58.835494Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2026-01-10T12:22:58.836686Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2026-01-10T12:22:58.837595Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2026-01-10T12:22:58.838624Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2026-01-10T12:22:58.847046Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2026-01-10T12:22:58.847110Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] The application data is transferred to the client. 
Number of messages 9, size 8388611 bytes 2026-01-10T12:22:58.849535Z :INFO: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] Closing read session. Close timeout: 0.000000s 2026-01-10T12:22:58.849603Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:1:5:8:0 null:stream_TestPutRecordsCornerCases:3:4:3:0 null:stream_TestPutRecordsCornerCases:2:3:0:0 null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:0:1:1:0 2026-01-10T12:22:58.849645Z :INFO: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] Counters: { Errors: 0 CurrentSessionLifetimeMs: 135 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:22:58.849736Z :NOTICE: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:22:58.849784Z :DEBUG: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] [null] Abort session to cluster 2026-01-10T12:22:58.850681Z :NOTICE: [/Root/] [/Root/] [8f28c498-3bc21ab2-e99c6683-a7500640] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:22:58.850889Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer user1 session user1_7_1_13078588170249499980_v1 grpc read failed 2026-01-10T12:22:58.850942Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer user1 session user1_7_1_13078588170249499980_v1 grpc closed 2026-01-10T12:22:58.851004Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer user1 session user1_7_1_13078588170249499980_v1 is DEAD 2026-01-10T12:23:00.361738Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593707396083346048:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:00.362093Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:00.382869Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:00.518768Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:00.546497Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:00.546609Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:00.556209Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:00.650713Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:00.650733Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:00.650739Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:00.650811Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:00.678161Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:00.946639Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:01.061738Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:01.289743Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, 
opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:01.367458Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:01.577592Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101)
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2026-01-10T12:23:01.701803Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TCmsTest::RequestRestartServicesReject |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardLoginFinalize::Success [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866207Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.950979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.951054Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.965380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.965467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.967026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.976767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.977998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.981176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.983793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-10T12:22:59.995410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.170625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.171661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.171791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.171869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.171936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.171997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172068Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.172796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.369930Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2026-01-10T12:23:05.369977Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370008Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2026-01-10T12:23:05.370037Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370058Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2026-01-10T12:23:05.370087Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370111Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2026-01-10T12:23:05.370139Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370162Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2026-01-10T12:23:05.370197Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370219Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-10T12:23:05.370247Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-10T12:23:05.370300Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.370321Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-10T12:23:05.370369Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.370398Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:23:05.370823Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:05.370965Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:23:05.371006Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:05.371050Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:23:05.371088Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:05.371147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:05.371210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-10T12:23:05.371252Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:23:05.371293Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-10T12:23:05.371332Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-10T12:23:05.371377Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-01-10T12:23:05.373801Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-10T12:23:05.373895Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2026-01-10T12:23:05.374084Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:05.374135Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.374330Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:05.374373Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-10T12:23:05.374914Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:23:05.375028Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:23:05.375082Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-10T12:23:05.375132Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-10T12:23:05.375182Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:05.375291Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-10T12:23:05.377232Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-01-10T12:23:05.377805Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:05.378003Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 226us result status StatusSuccess 2026-01-10T12:23:05.378529Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::BanUnbanUser >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866283Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.958050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.958130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.973258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.973360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.973524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.982563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.982826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.983526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.983877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.181674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.182773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.182912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.182992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.183921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 944, LocalPathId: 10] was 0 2026-01-10T12:23:05.717612Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-10T12:23:05.717649Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-10T12:23:05.717672Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-10T12:23:05.717693Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-10T12:23:05.717715Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-10T12:23:05.717750Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-10T12:23:05.717787Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-10T12:23:05.717808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-10T12:23:05.717848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-10T12:23:05.717876Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-10T12:23:05.717896Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-10T12:23:05.717932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-10T12:23:05.717956Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-10T12:23:05.717978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-10T12:23:05.717999Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-10T12:23:05.718021Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-10T12:23:05.718054Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-10T12:23:05.718083Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-10T12:23:05.718104Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-10T12:23:05.718125Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-10T12:23:05.718147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-10T12:23:05.718172Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:23:05.718193Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:23:05.718219Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:23:05.718240Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:23:05.718261Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:23:05.718281Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:23:05.720576Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.720693Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.720779Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.720884Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.720988Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.721139Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.721455Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.721581Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722000Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722081Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722349Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722503Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722553Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722650Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.722938Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.723044Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.723471Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.723728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.723835Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.723898Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.724038Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.724119Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.724197Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:05.730460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:05.735573Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:05.735661Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:05.735857Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:05.735925Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.735980Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:05.736152Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:818:2786] sender: [5:874:2058] recipient: [5:15:2062] 2026-01-10T12:23:05.774813Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:05.774884Z 
node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:05.828823Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:05.833970Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:05.834094Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:05.834144Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.835301Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-10T12:23:05.835364Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:05.835421Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:868:2825], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:05.835951Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 0 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> DataStreams::TestListShards1Shard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.961315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.961395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.977137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.977230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.977396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.985725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.985909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.986530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:22:59.986695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.144838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:23:00.147664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.147960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
LocalPathId: 2] 2026-01-10T12:23:05.867757Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-10T12:23:05.867777Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.867792Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-10T12:23:05.867813Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-10T12:23:05.867830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-10T12:23:05.867852Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.867872Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:23:05.867900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 39] name: DirSub1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:23:05.867914Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:23:05.868287Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:05.868379Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:23:05.868430Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:23:05.868471Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:23:05.868506Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:23:05.868550Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:05.868598Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-10T12:23:05.868629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:23:05.868660Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:23:05.868703Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 0 2026-01-10T12:23:05.868737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-10T12:23:05.870601Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusSuccess TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:05.870723Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2026-01-10T12:23:05.870887Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:05.870921Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:05.871071Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:05.871108Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 107, path id: 1 2026-01-10T12:23:05.871558Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:23:05.871646Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:23:05.871678Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:23:05.871709Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-10T12:23:05.871751Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:05.871827Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 0 2026-01-10T12:23:05.873157Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 TestModificationResult got TxId: 107, wait until txId: 107 2026-01-10T12:23:05.873844Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:05.874000Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 167us result status StatusSuccess 2026-01-10T12:23:05.874345Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:05.875033Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:05.875181Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 160us result status StatusSuccess 2026-01-10T12:23:05.875449Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:05.875947Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:05.876013Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table >> DataStreams::ListStreamsValidation [GOOD] >> TCmsTest::ManualRequestApprovalLockingAllNodes [GOOD] >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2026-01-10T12:22:48.942959Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707341972319807:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:48.943745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:49.130145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.168633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.168783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.205140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:49.292845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.322160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:49.393169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:49.393210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:49.393252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:49.393342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.597633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.731244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:49.921735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:49.944671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-10T12:22:49.956458Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:50.246872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: 
"shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-01-10T12:22:50.321219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:50.389622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:50.453828Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:22:50.455583Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:22:50.455613Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:50.461281Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-10T12:22:50.461340Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-10T12:22:50.461406Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1768047770124-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047770}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1768047770354-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1768047770}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1768047770352-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047770}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1768047770124-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047770}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1768047770354-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1768047770}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1768047770352-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1768047770,"finish":1768047770},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047770}' 2026-01-10T12:22:53.205224Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707366420892638:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:53.205812Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:53.220423Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:53.305338Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profi ... 00:1768047778.378633 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.379660 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.387664 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.387774 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.392666 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.392756 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.397381 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.397549 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:22:58.413829Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:58.451709Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1768047778.493908 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.494004 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:22:58.502658Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1768047778.544402 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required 
fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.544558 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:22:58.552421Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1768047778.597866 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.597977 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.611618 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.611798 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:22:58.648031Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:58.665187Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2026-01-10T12:22:58.665248Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-10T12:22:58.665273Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-10T12:22:58.665298Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2026-01-10T12:22:58.665316Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-10T12:22:58.665335Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-10T12:22:58.672061Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:58.677404Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2026-01-10T12:22:58.677498Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-10T12:22:58.677532Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-10T12:22:58.677561Z 
node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-01-10T12:22:58.677590Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-10T12:22:58.677621Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1768047778.687173 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047778.687286 412730 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-10T12:23:02.238643Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593707402194399184:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:02.238723Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:02.257703Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:02.361468Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:02.379105Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:02.379208Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:02.400032Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:02.460125Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:02.460151Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:02.460161Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:02.460252Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:02.505358Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:02.698380Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:02.747850Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:02.947104Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) E0000 00:00:1768047783.073573 414892 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047783.082374 414892 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047783.088911 414892 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047783.095280 414892 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1768047783.101550 414892 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn >> TMaintenanceApiTest::SingleCompositeActionGroup |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2026-01-10T12:22:48.462677Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707341752206492:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:48.463200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:48.652433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:48.692632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:48.692771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:48.731348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:48.811516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:48.823705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:48.880821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-10T12:22:48.880848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:48.880885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:48.880981Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.107529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.205358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:49.396759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:49.473698Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:51.648825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:22:51.835556Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7593707354637110316:2352] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:42:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:22:51.982086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:52.126169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:52.186959Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-10T12:22:52.187045Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 
2026-01-10T12:22:52.187069Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-10T12:22:52.187113Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-10T12:22:52.187131Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-10T12:22:52.187152Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-10T12:22:52.187194Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-10T12:22:52.187210Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-10T12:22:52.187221Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-10T12:22:52.187258Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-10T12:22:52.187297Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-10T12:22:52.187312Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-10T12:22:52.187333Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-10T12:22:52.191293Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-10T12:22:52.192912Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-10T12:22:52.192943Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-10T12:22:52.210756Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2026-01-10T12:22:52.210830Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-01-10T12:22:52.210865Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2026-01-10T12:22:52.210914Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-01-10T12:22:52.210945Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2026-01-10T12:22:52.210968Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2026-01-10T12:22:52.211000Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 
2026-01-10T12:22:52.211025Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2026-01-10T12:22:52.211049Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2026-01-10T12:22:52.211072Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-01-10T12:22:52.211112Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2026-01-10T12:22:52.211148Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2026-01-10T12:22:52.211177Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2026-01-10T12:22:52.211202Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2026-01-10T12:22:52.211265Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2026-01-10T12:22:52.211304Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2026-01-10T12:22:53.890667Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707363605345349:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:53.890718Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:53.917699Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:54.022079Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:54.036975Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:54.037047Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:54.063054Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:54.100153Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:54.188738Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:54.188774Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:54.188783Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:54.188876Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:54.426955Z node 4 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:54.486057Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:54.665462Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:54.874335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:54.906937Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:54.936436Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:54.998784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:58.058423Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707384991970260:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:58.058981Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:58.077631Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:58.218117Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:58.239944Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:58.240031Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:58.248422Z node 7 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:58.331030Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:58.331055Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:58.331066Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:58.331165Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:58.383142Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:58.612638Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:58.697725Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:58.905021Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:59.066305Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:59.163372Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7593707389286939674:3581] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:02.608332Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593707404632514333:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:02.608396Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:02.629259Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:02.734158Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:02.749178Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:23:02.749272Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:02.757116Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:02.836755Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:02.836780Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:02.836792Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:02.836870Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:02.906533Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:03.094548Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:03.159582Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:03.337211Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table [GOOD] >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [GOOD] >> VectorIndexBuildTest::Shard_Build_Error >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved [GOOD] >> TCmsTest::Notifications >> IndexBuildTest::NullsAreUniq [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeStateStorage >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::NullsAreUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] 
recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.625430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.625475Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.637611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.637728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.637921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.645422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.647015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.650366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.654275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.662121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-01-10T12:21:58.662733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.671491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.671690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.671736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.671965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.825036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.825704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.825774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.825819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.825882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:21:58.825931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.825991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.826495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-10T12:23:09.133293Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 2/2, is published: true 2026-01-10T12:23:09.133364Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1017:2957] message: TxId: 281474976725761 2026-01-10T12:23:09.133411Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-10T12:23:09.133448Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-10T12:23:09.133480Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-10T12:23:09.133543Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-10T12:23:09.133574Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:1 2026-01-10T12:23:09.133594Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:1 2026-01-10T12:23:09.133623Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 34] was 3 2026-01-10T12:23:09.137262Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-10T12:23:09.137342Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-10T12:23:09.137403Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 107, txId# 281474976725761 2026-01-10T12:23:09.137522Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : 
TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1667:3537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0}, txId# 281474976725761 2026-01-10T12:23:09.139129Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking 2026-01-10T12:23:09.139258Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1667:3537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-10T12:23:09.139329Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-10T12:23:09.140852Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done 2026-01-10T12:23:09.140997Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , 
SubscribersCount: 1, CreateSender: [5:1667:3537], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-10T12:23:09.141072Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-10T12:23:09.141202Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:23:09.141251Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [5:1684:3554] TestWaitNotification: OK eventTxId 107 2026-01-10T12:23:09.141997Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-10T12:23:09.142300Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-10T12:23:09.143161Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-10T12:23:09.143452Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 316us result status StatusSuccess 2026-01-10T12:23:09.143999Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { 
Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "test_index" LocalPathId: 33 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index1" KeyColumnNames: "index2" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TxUsage::WriteToTopic_Demo_47_Table [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageTooBig >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue >> TestSqsTopicHttpProxy::TestSendMessageBatch >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer >> TestSqsTopicHttpProxy::TestSendMessage >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl >> VectorIndexBuildTest::Shard_Build_Error [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2026-01-10T12:22:44.838621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707325324648681:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.838723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.137536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.194005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.194139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.259093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.316607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.343692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.560460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.560488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:45.560498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.560567Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.858510Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.915301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.972748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.143962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:46.348370Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:46.400741Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:22:46.400836Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-10T12:22:46.400853Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:22:46.400868Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:22:48.977028Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707344205230005:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:48.977081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:49.002829Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.105800Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.127062Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.127146Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.153633Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:49.235201Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:49.235225Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:49.235233Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:49.235318Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.264725Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:49.517204Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.606745Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:49.829661Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:49.848737Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-10T12:22:49.987375Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:50.002411Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:50.066998Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:50.084183Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2026-01-10T12:22:50.084231Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2026-01-10T12:22:50.084259Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2026-01-10T12:22:50.084287Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2026-01-10T12:22:50.092372Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-10T12:22:50.092457Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-10T12:22:50.092493Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-10T12:22:50.092525Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-10T12:22:53.504894Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707365963400313:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:53.504927Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:22:53.521299Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:53.626849Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:53.642805Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:53.642897Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:53.669451Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:53.745382Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:53.745410Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:53.745418Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:53.745524Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:53.760179Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:53.967298Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:54.021849Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:54.205323Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:54.373619Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:54.421501Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:22:54.460554Z node 7 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-10T12:22:54.478366Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-10T12:22:54.478403Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-10T12:22:54.480161Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-10T12:22:54.480193Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-10T12:22:54.509692Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:57.828942Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593707380324535782:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:57.829010Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:57.845436Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:57.956363Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:57.977367Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:57.977471Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:57.985715Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:58.061490Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:58.061515Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:58.061527Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:58.061602Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:58.116729Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:58.297363Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:58.351526Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:58.549236Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:58.836557Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:02.829412Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7593707380324535782:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:02.829521Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:00.978205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:00.978302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:00.978340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:00.978379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:00.978434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:00.978464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:00.978514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:00.978596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:23:00.979429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:00.979721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:01.068644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:01.068727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:01.082138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:01.082242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:01.082427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:01.090965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:01.091183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:01.091931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:01.092176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:01.094321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:01.094498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:01.095672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:01.095730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:01.095795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:01.095852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:01.095900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:01.096152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:01.254196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.255969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.256035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.256121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.256209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.256289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:01.256413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:06.412101Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:06.415912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:06.416050Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:06.416128Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:06.416648Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:06.416718Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:06.416774Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:06.417407Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:06.417749Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:06.417858Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:06.422916Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:06.425215Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:06.425587Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:06.425685Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:06.430604Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:06.432890Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:06.433425Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:06.433709Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 331us result status StatusSuccess 2026-01-10T12:23:06.434317Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAutu0W/hoRq2SHFkEL0Eo\n2K+FI/bVtOWU1EtmOiB16zPlYPF3XeKgKN8+hFVH5DFz7loaeNryu13h5kVKg3V8\nwFPMU0PdKQ/65n4A02bb5rSMS6JwMIhnfulS5in1nOLvVZI/Z4PaItRNvj+JyTOr\n5x/DQzvuAFQ2rtEjWP+AALP8twYHRHLYUSb7dcl8IZsTuWPpKhEG4rSVZEhvdGVd\np0IIXo8LbuCY021t6IUnvJBUi5zSPGQl/lT2gzNDdXNseBO28Y5RYjdPs+GeMnr5\n9qM949onNKFEMrxR4ENMU6rJiahqj0wBovTKroJ2qjk6IA4aUVVD/EDcMD1q7HOe\n7QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134186406 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:10.435303Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.448494Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.452256Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:10.452622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:10.453032Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.453140Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin 
Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:10.458191Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.460472Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:10.460894Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.460991Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:10.465595Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.467821Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:10.468311Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.468431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:10.473440Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:10.479241Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-10T12:23:10.479806Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:10.480047Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 280us result status StatusSuccess 2026-01-10T12:23:10.480674Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAutu0W/hoRq2SHFkEL0Eo\n2K+FI/bVtOWU1EtmOiB16zPlYPF3XeKgKN8+hFVH5DFz7loaeNryu13h5kVKg3V8\nwFPMU0PdKQ/65n4A02bb5rSMS6JwMIhnfulS5in1nOLvVZI/Z4PaItRNvj+JyTOr\n5x/DQzvuAFQ2rtEjWP+AALP8twYHRHLYUSb7dcl8IZsTuWPpKhEG4rSVZEhvdGVd\np0IIXo8LbuCY021t6IUnvJBUi5zSPGQl/lT2gzNDdXNseBO28Y5RYjdPs+GeMnr5\n9qM949onNKFEMrxR4ENMU6rJiahqj0wBovTKroJ2qjk6IA4aUVVD/EDcMD1q7HOe\n7QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134186406 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2026-01-10T12:22:44.840976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707328194513886:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.841038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.117651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.184572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.185856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.201221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.318430Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:45.343265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.561745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.561793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-10T12:22:45.561800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.561917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.852368Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.920302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.972740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.124644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:46.390794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { 
sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: 
"14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" } 2026-01-10T12:22:49.843901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707328194513886:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:49.843953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "20" shard_id: "shard-000001" } records { sequence_number: "20" shard_id: "shard-000009" } records { sequence_number: "20" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000005" } ... 
":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783072-105","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783104-106","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783104-107","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783133-108","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783133-109","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783162-110","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783162-111","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1768047783,"finish":1768047783},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047783}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1768047783189-112","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1768047783,"finish":1768047784},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047784}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783189-113","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1768047783,"finish":1768047784},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047784}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047783189-114","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1768047783,"finish":1768047784},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047784}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1768047784218-115","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1768047784,"finish":1768047785},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047785}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047784218-116","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1768047784,"finish":1768047785},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047785}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047784218-117","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1768047784,"finish":1768047785},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047785}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1768047785234-118","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1768047785,"finish":1768047786},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047786}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047785234-119","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1768047785,"finish":1768047786},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047786}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047785234-120","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1768047785,"finish":1768047786},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047786}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1768047786251-121","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1768047786,"finish":1768047787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047786251-122","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1768047786,"finish":1768047787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047787}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047786251-123","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1768047786,"finish":1768047787},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047787}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1768047787266-124","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1768047787,"finish":1768047788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047788}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047787266-125","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1768047787,"finish":1768047788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047788}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1768047787266-126","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1768047787,"finish":1768047788},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1768047788}' |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TxUsage::WriteToTopic_Demo_47_Query >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> TxUsage::WriteToTopic_Demo_38_Table [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::RequestRestartServicesDryRun [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Shard_Build_Error [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 
1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.624642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.624689Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.635375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.635457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.636063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.644796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.646996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.650332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.654309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.661591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.662693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.671463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-10T12:21:58.671538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.671672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.671721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.671883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.798003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.798966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:21:58.799841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.799923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:23:10.915977Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:1032:2978], Recipient [6:1044:2988]: NKikimr::TEvTablet::TEvTabletDead 2026-01-10T12:23:10.916444Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409549 2026-01-10T12:23:10.916576Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409549 Forgetting tablet 72075186233409549 2026-01-10T12:23:10.918078Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:23:10.918380Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-10T12:23:10.919185Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:10.919245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-10T12:23:10.919342Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:23:10.920253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:23:10.920294Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:23:10.920419Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:23:10.920525Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1299:3230], Recipient [6:865:2829]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 
72057594046678944 State: 4 2026-01-10T12:23:10.920560Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2026-01-10T12:23:10.920592Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409547 state Offline 2026-01-10T12:23:10.920659Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:23:10.920692Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:23:10.920771Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:23:10.920971Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 Forgetting tablet 72075186233409547 2026-01-10T12:23:10.921252Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1372:3291], Recipient [6:865:2829]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1372:3291] ServerId: [6:1374:3293] } 2026-01-10T12:23:10.921279Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-10T12:23:10.921347Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1299:3230], Recipient [6:869:2832]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2026-01-10T12:23:10.921369Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2026-01-10T12:23:10.921389Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409548 state Offline 2026-01-10T12:23:10.921527Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:23:10.921633Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:847:2816], Recipient [6:865:2829]: NKikimr::TEvTablet::TEvTabletDead 2026-01-10T12:23:10.922063Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409547 2026-01-10T12:23:10.922161Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409547 2026-01-10T12:23:10.923300Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1373:3292], Recipient [6:869:2832]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1373:3292] ServerId: [6:1375:3294] } 2026-01-10T12:23:10.923329Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed Forgetting tablet 72075186233409548 2026-01-10T12:23:10.924224Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:851:2818], Recipient [6:869:2832]: 
NKikimr::TEvTablet::TEvTabletDead 2026-01-10T12:23:10.924564Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409548 2026-01-10T12:23:10.924632Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409548 2026-01-10T12:23:10.925856Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:23:10.926026Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:23:10.928538Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:23:10.928606Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:23:10.928901Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:10.929034Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:10.929100Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:23:10.929179Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:23:10.929230Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:23:10.929257Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:23:10.929280Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:23:10.929315Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:23:10.930513Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:23:10.930549Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409547 2026-01-10T12:23:10.930608Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:23:10.930646Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409548 2026-01-10T12:23:10.931703Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 
paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:10.963042Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-10T12:23:10.963523Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 237 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 237 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::TestDrainAction >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer [GOOD] >> BasicUsage::ConflictingWrites |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:00.638827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:00.638926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:00.638983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:00.639038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:00.639095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:00.639131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:00.639188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:00.639269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:00.640142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:00.640442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:00.723407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:00.723484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:00.737526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:00.737626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:00.737816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:00.746296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:00.746531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:00.747259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:00.747477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:00.749796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:00.749978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:00.751375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:00.751438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:00.751507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:00.751558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:00.751611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:00.751891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.866255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.870956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.871013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.871076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.871125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6678944 2026-01-10T12:23:08.204423Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.207153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.207332Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:08.207369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:08.207625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.207672Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:08.207709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:08.208149Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:08.208391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.208468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.212596Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.213829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.214142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.214214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.218454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.220039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.220340Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.220398Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.227070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.228537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 
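Aside on the ut_login output: the TTxLoginFinalize entries above complete with "Invalid password", and the entries that follow show the account being refused with "too many failed password attempts" and then accepted again once the lockout window has passed, which is the behaviour AccountLockoutAndAutomaticallyUnlock checks. The Python sketch below is only a toy model of such a policy; the attempt threshold and lockout duration are assumed illustrative values, not the schemeshard's actual configuration.

import time

class LockoutPolicy:
    # Toy model of account lockout with automatic unlock.
    # ASSUMPTION: max_attempts=4 and lockout_seconds=3.0 are illustrative
    # parameters, not values taken from the YDB schemeshard config.
    def __init__(self, max_attempts=4, lockout_seconds=3.0):
        self.max_attempts = max_attempts
        self.lockout_seconds = lockout_seconds
        self.failures = 0
        self.locked_until = 0.0

    def login(self, password_ok, now=None):
        now = time.monotonic() if now is None else now
        if now < self.locked_until:
            return "ERROR: too many failed password attempts"
        if not password_ok:
            self.failures += 1
            if self.failures >= self.max_attempts:
                self.locked_until = now + self.lockout_seconds
                self.failures = 0
            return "ERROR: Invalid password"
        self.failures = 0
        return "SUCCESS"

policy = LockoutPolicy()
for _ in range(4):
    print(policy.login(password_ok=False, now=0.0))  # four failed attempts
print(policy.login(password_ok=True, now=1.0))       # still inside the lockout window
print(policy.login(password_ok=True, now=10.0))      # window elapsed, login succeeds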
2026-01-10T12:23:08.228862Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.228945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-10T12:23:08.229207Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.229283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-10T12:23:08.229684Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:08.229851Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 200us result status StatusSuccess 2026-01-10T12:23:08.230255Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwO/00zaKY0rcY6bdYObu\nAOc2G56xx4GgyMg1MZeqcXn9+sZeUbAhEePZRjkQtqYhkDW3CJ4/pkVlyaxGbX+Y\nnz7YHuEbdNxW6IKpSiSKw2lNhSuzm31w3vNKzt3Dl3Z9q+YswTfuNRYjAfqas/1S\nSYGHSZsRstdSceOeVPZ1lrR6t836UOApfK/zVsLWeXlLaR2EjQxs/PvsGOfYi8S8\nh4znQloEJjSt22mRXvowmLK1zRi7Tg4mjVCr+0l8Z97bLjnv/Re4NbrOU56Z5c11\nxz+3eOi5G2Pt7LGVZN02fBB2xuYpyrdvUb6TroiVpytYqYYWsWJrblWVIrnXz/cD\nJwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134188199 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 
255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:12.231167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:12.236668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:12.239639Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:12.239813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:12.240361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:12.240491Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:12.245198Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:12.250936Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-10T12:23:12.251509Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:12.251746Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 280us result status StatusSuccess 2026-01-10T12:23:12.252328Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwO/00zaKY0rcY6bdYObu\nAOc2G56xx4GgyMg1MZeqcXn9+sZeUbAhEePZRjkQtqYhkDW3CJ4/pkVlyaxGbX+Y\nnz7YHuEbdNxW6IKpSiSKw2lNhSuzm31w3vNKzt3Dl3Z9q+YswTfuNRYjAfqas/1S\nSYGHSZsRstdSceOeVPZ1lrR6t836UOApfK/zVsLWeXlLaR2EjQxs/PvsGOfYi8S8\nh4znQloEJjSt22mRXvowmLK1zRi7Tg4mjVCr+0l8Z97bLjnv/Re4NbrOU56Z5c11\nxz+3eOi5G2Pt7LGVZN02fBB2xuYpyrdvUb6TroiVpytYqYYWsWJrblWVIrnXz/cD\nJwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134188199 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:03.389118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:03.389208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:03.389249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:03.389289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:03.389354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:03.389385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:03.389469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:03.389568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:03.390437Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:03.390713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:03.459570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:03.459634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:03.474721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:03.474828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:03.475023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:03.482884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:03.483112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:03.483826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:03.484061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:03.486330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:03.486517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:03.487685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:03.487749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:03.487819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:03.487881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:03.487955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:03.488214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:03.623672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.624970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:03.625326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:08.318799Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-10T12:23:08.318974Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:08.319010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:08.319153Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:08.319192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:23:08.319608Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:08.319687Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:08.319724Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:23:08.319758Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-10T12:23:08.319798Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:08.319878Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-10T12:23:08.321395Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-10T12:23:08.321665Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.321699Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:08.384052Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.386831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.386928Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-10T12:23:08.386971Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:08.387323Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.387371Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:08.387409Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:08.387939Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:08.388260Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.388363Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.391956Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.393596Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.393808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.393866Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.397323Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.398716Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.398959Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.399017Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:08.411120Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:08.412770Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-10T12:23:08.414941Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: false } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:23:08.415321Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-10T12:23:08.415426Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:23:08.415472Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:23:08.415509Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:23:08.415538Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:23:08.415582Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:23:08.415629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:23:08.415661Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:23:08.415691Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:23:08.415723Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 1, subscribers: 0 2026-01-10T12:23:08.415754Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:23:08.417490Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSuccess TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:08.417595Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-10T12:23:08.417730Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:08.417765Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:08.417902Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:08.417938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:23:08.418332Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:23:08.418405Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 
2026-01-10T12:23:08.418440Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:23:08.418479Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:23:08.418512Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:08.418594Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:23:08.419966Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:23:12.421065Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:12.421233Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: account is blocked, at schemeshard: 72057594046678944 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrl >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> TxUsage::WriteToTopic_Demo_38_Query |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesDryRun [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::BridgeModeStateStorage [GOOD] >> TCmsTest::BridgeModeSysTablets >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TMaintenanceApiTest::TestDrainAction [GOOD] >> TMaintenanceApiTest::TestCordonAction >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TPQTest::IncompleteProxyResponse |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesWrongHost [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] >> TPQTest::IncompleteProxyResponse [GOOD] >> 
TPQTest::SmallMsgCompactificationWithRebootsTest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize >> TMaintenanceApiTest::TestCordonAction [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessage >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2026-01-10T12:23:05.068134Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty [GOOD] >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks >> TestSqsTopicHttpProxy::TestSendMessageTooBig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] Test command err: 2026-01-10T12:23:13.742957Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-10T12:23:13.743096Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-10T12:23:15.896604Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2026-01-10T12:23:15.896690Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query >> TestSqsTopicHttpProxy::TestSendMessageBatch [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TestSqsTopicHttpProxy::TestSendMessageBatchLong |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TCmsTest::VDisksEviction [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty |93.4%| [TM] {BAZEL_UPLOAD} 
ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrl [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2026-01-10T12:23:16.688477Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-10T12:23:16.688568Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-10T12:23:16.688709Z node 26 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-10T12:23:16.690265Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" 
State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-10T12:23:16.690829Z node 26 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-10T12:23:16.691074Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-10T12:23:16.691133Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: 
RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-10T12:23:16.691183Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-10T12:23:16.691338Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:23:16.691532Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:23:16.691574Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-10T12:23:16.691871Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-10T12:23:16.691924Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:23:16.692016Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-10T12:23:16.692105Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-10T12:23:16.692138Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326: ... torage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: 
"vdisk-1-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120622560 } 2026-01-10T12:23:17.105550Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-10T12:23:17.105642Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-10T12:23:17.105700Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-10T12:23:17.105878Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-10T12:23:17.106109Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { 
Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-10T12:23:17.106178Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-10T12:23:17.106447Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.100000s 2026-01-10T12:23:17.106514Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:23:17.106645Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-10T12:23:17.106708Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-10T12:23:17.106747Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2026-01-10T12:23:17.106780Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2026-01-10T12:23:17.106826Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2026-01-10T12:23:17.106853Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2026-01-10T12:23:17.106879Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2026-01-10T12:23:17.106908Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 33, wbId# [33:8388350642965737326:1634689637] 2026-01-10T12:23:17.107151Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 120524072 ChangeTime: 120524072 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.107923Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 120524072 ChangeTime: 120524072 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108194Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 120524072 ChangeTime: 120524072 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108319Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 33 CreateTime: 120524072 ChangeTime: 120524072 Path: "/33/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108392Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 120524072 ChangeTime: 120524072 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108456Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 120524072 ChangeTime: 120524072 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108519Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 120524072 ChangeTime: 120524072 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108602Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 120524072 ChangeTime: 120524072 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-10T12:23:17.108684Z node 26 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:23:17.121358Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-10T12:23:17.121646Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 26 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2026-01-10T12:23:17.122242Z node 26 :CMS INFO: cms.cpp:1441: User user removes request user-r-3 2026-01-10T12:23:17.122304Z node 26 :CMS DEBUG: cms.cpp:1464: Resulting status: OK 2026-01-10T12:23:17.122375Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2026-01-10T12:23:17.122421Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 26 2026-01-10T12:23:17.122559Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2026-01-10T12:23:17.135175Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2026-01-10T12:23:17.135384Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-10T12:18:33.801878Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-10T12:18:33.858142Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:18:33.861755Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:18:33.862053Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:18:33.862103Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:18:33.862151Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-10T12:18:33.862190Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-10T12:18:33.862267Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:33.862316Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-10T12:18:33.876843Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-10T12:18:33.876927Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:18:33.894555Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:33.897001Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:33.897149Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:18:33.897848Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-10T12:18:33.897996Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:18:33.898297Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:18:33.898615Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-10T12:18:33.900476Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:18:33.900523Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-10T12:18:33.900562Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-10T12:18:33.900608Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:18:33.900660Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:18:33.901761Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:33.902730Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:18:33.902770Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:33.902804Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:33.902862Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:33.902895Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-10T12:18:33.902932Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.902974Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:33.903034Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-10T12:18:33.903080Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-10T12:18:33.903114Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:18:33.903151Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-10T12:18:33.903179Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-10T12:18:33.903233Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-10T12:18:33.903292Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-10T12:18:33.903335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:33.903588Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:33.903624Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:18:33.903720Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:18:33.903868Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-10T12:18:33.904020Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.906287Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:18:33.906399Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:18:33.906457Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:18:33.906499Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.906535Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:18:33.906576Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:18:33.906622Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-10T12:18:33.906658Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:18:33.906956Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:196:2203], now have 1 active actors on pipe Set pipe for create session: [1:200:2206] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:201:2057] recipient: [1:14:2061] 2026-01-10T12:18:33.907921Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2206], now have 1 active actors on pipe 2026-01-10T12:18:33.908058Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T1 ... 
UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.547294Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-10T12:23:19.547324Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-10T12:23:19.547358Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 27:0 isTruncatedBlob 0 2026-01-10T12:23:19.547481Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 27:0 isTruncatedBlob 0 hasNonZeroParts 0 isMiddlePartOfMessage 0 2026-01-10T12:23:19.548358Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 50:0 2026-01-10T12:23:19.548561Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 14 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 50 partno 0 count 4294967295 size 4294967295 endOffset 67 max time lag 0ms effective offset 50 2026-01-10T12:23:19.548900Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 14 added 2 blobs, size 5223238 count 17 last offset 61, current partition end offset: 67 2026-01-10T12:23:19.548935Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 14. Send blob request. 2026-01-10T12:23:19.548994Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 50 partno 0 count 11 parts_count 0 source 1 size 3379747 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-10T12:23:19.549030Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 61 partno 0 count 6 parts_count 0 source 1 size 1843491 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-10T12:23:19.549074Z node 159 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 14. All 2 blobs are from cache. 2026-01-10T12:23:19.549126Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' 2026-01-10T12:23:19.549161Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 61 partno 0 count 6 parts 0 suffix '0' 2026-01-10T12:23:19.549224Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-10T12:23:19.549334Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549380Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 52 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549417Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 54 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549475Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 56 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549510Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 58 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549546Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 60 totakecount 11 count 1 size 307240 from pos 0 cbcount 1 2026-01-10T12:23:19.549602Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 61 totakecount 6 count 2 size 614475 from pos 0 cbcount 2 2026-01-10T12:23:19.549678Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-10T12:23:19.549706Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-10T12:23:19.549741Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 50:0 isTruncatedBlob 0 2026-01-10T12:23:19.550028Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 50 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.550301Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 51 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.550536Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 52 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.550776Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 53 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.551033Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 54 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.551291Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: 
Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 55 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.551551Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 56 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.551809Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 57 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.552094Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 58 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.552382Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 59 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.552643Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 60 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-10T12:23:19.552739Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-10T12:23:19.554933Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000027_00000_0000000023_00000 2026-01-10T12:23:19.555029Z node 159 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-10T12:23:19.555191Z node 159 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-10T12:23:19.555233Z node 159 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:23:19.555347Z node 159 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 50 partNo 0 count 11 size 167 2026-01-10T12:23:19.555383Z node 159 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000027_00000_0000000023_00000(+) to d0000000000_00000000000000000027_00000_0000000023_00000(+) 2026-01-10T12:23:19.559019Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 27 count 23 actorID [159:139:2142] 2026-01-10T12:23:19.559073Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. 
Partition 0 offset 50 count 11 size 3379747 actorID [159:139:2142] is actual 1 2026-01-10T12:23:19.559154Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 50 count 11 size 167 actorID [159:139:2142] 2026-01-10T12:23:19.559249Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 27 partno 0 count 23 parts 0 suffix '0' size 263 2026-01-10T12:23:19.559305Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 3379747 2026-01-10T12:23:19.560865Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 167 2026-01-10T12:23:19.561116Z node 159 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:23:19.561157Z node 159 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-10T12:23:19.561193Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-10T12:23:19.565689Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:304:2290], now have 1 active actors on pipe 2026-01-10T12:23:19.565811Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-10T12:23:19.565855Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-10T12:23:19.565967Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 61 for user __ydb_compaction_consumer 2026-01-10T12:23:19.566289Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:306:2292], now have 1 active actors on pipe Got start offset = 50 >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup |93.4%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid [GOOD] |93.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] Test command err: 2026-01-10T12:23:11.232660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707443582752166:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.232705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.454666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.492612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.492733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.524765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.595537Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.744576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744692Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:11.752438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:12.101789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.239169Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.293528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.299250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.375756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.480020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.524380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.560775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.596002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.626954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.662703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.694850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.724769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.755297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.868777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707452172688368:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:13.868907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:13.869199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707452172688378:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:13.869268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:13.869433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707452172688382:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:13.875200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:13.886492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707452172688384:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:13.988253Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707452172688435:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.642463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.671599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.697455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.722307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.749510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.776652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T1 ... 
nal Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.088740Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707486205716270:2439]: Pool not found 2026-01-10T12:23:22.089013Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:23:22.272235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707486205716266:2436]: Pool not found 2026-01-10T12:23:22.272433Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:23:22.274911Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593707490500683671:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:23:22.274940Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707490500683670:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.275017Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.275225Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707490500683674:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.275285Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.521993Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707490500683668:2453]: Pool not found 2026-01-10T12:23:22.522269Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:23:22.757667Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707469025844721:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:22.757743Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:23:22.935114Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:51710) incoming connection opened 2026-01-10T12:23:22.935211Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:51710) -> (POST /Root, 52 bytes) 2026-01-10T12:23:22.935324Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38f0:8b0e:847b:0:20f0:8b0e:847b:0] request [SendMessage] url [/Root] database [/Root] requestId: df9a677d-1d872658-d528a654-f67c51f2 2026-01-10T12:23:22.935591Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [df9a677d-1d872658-d528a654-f67c51f2] got new request from [38f0:8b0e:847b:0:20f0:8b0e:847b:0] database '/Root' stream '' 2026-01-10T12:23:22.935978Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-10T12:23:22.936049Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-10T12:23:22.936091Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-10T12:23:22.936120Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-10T12:23:22.936152Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-10T12:23:22.936218Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-10T12:23:22.936250Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-10T12:23:22.942536Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.943446Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.944230Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.945115Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-10T12:23:22.945166Z node 2 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.945192Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.945242Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:22.945325Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-10T12:23:22.945426Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [df9a677d-1d872658-d528a654-f67c51f2] [auth] Authorized successfully 2026-01-10T12:23:22.945443Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [df9a677d-1d872658-d528a654-f67c51f2] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:22.945804Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [df9a677d-1d872658-d528a654-f67c51f2] Not retrying GRPC response. Code: 400, Error: MissingParameter 2026-01-10T12:23:22.945893Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [df9a677d-1d872658-d528a654-f67c51f2] reply with status: STATUS_UNDEFINED message: No QueueUrl parameter. 2026-01-10T12:23:22.946024Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:51710) <- (400 MissingParameter, 64 bytes) 2026-01-10T12:23:22.946094Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:51710) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"", "MessageBody":"MessageBody-0" } 2026-01-10T12:23:22.946131Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:51710) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: df9a677d-1d872658-d528a654-f67c51f2 Content-Type: application/x-amz-json-1.1 Content-Length: 64 2026-01-10T12:23:22.946231Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:51710) connection closed Http output full {"__type":"MissingParameter","message":"No QueueUrl parameter."} 2026-01-10T12:23:22.946883Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:51718) incoming connection opened 2026-01-10T12:23:22.946950Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:51718) -> (POST /Root, 100 bytes) 2026-01-10T12:23:22.947030Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [78f2:8b0e:847b:0:60f2:8b0e:847b:0] request [SendMessage] url [/Root] database [/Root] requestId: 60a5485-8e1487d3-98e79a74-5c26e706 2026-01-10T12:23:22.947309Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [60a5485-8e1487d3-98e79a74-5c26e706] got new request from [78f2:8b0e:847b:0:60f2:8b0e:847b:0] database '/Root' stream '' 2026-01-10T12:23:22.947606Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [60a5485-8e1487d3-98e79a74-5c26e706] [auth] Authorized successfully 2026-01-10T12:23:22.947633Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [60a5485-8e1487d3-98e79a74-5c26e706] sending grpc request to '' database: '/Root' iam token size: 0 
2026-01-10T12:23:22.949082Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [60a5485-8e1487d3-98e79a74-5c26e706] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.NonExistentQueue 2026-01-10T12:23:22.949157Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [60a5485-8e1487d3-98e79a74-5c26e706] reply with status: STATUS_UNDEFINED message: You do not have access or the '/Root/ExampleQueueName' does not exist 2026-01-10T12:23:22.949277Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:51718) <- (400 AWS.SimpleQueueService.NonExistentQueue, 134 bytes) 2026-01-10T12:23:22.949320Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:51718) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/16/ExampleQueueName/13/user_consumer", "MessageBody":"MessageBody-0" } 2026-01-10T12:23:22.949342Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:51718) Response: HTTP/1.1 400 AWS.SimpleQueueService.NonExistentQueue Connection: close x-amzn-requestid: 60a5485-8e1487d3-98e79a74-5c26e706 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-10T12:23:22.949431Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:51718) connection closed Http output full {"__type":"AWS.SimpleQueueService.NonExistentQueue","message":"You do not have access or the '/Root/ExampleQueueName' does not exist"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] Test command err: 2026-01-10T12:23:12.478328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707447503292637:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:12.479293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:12.744654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:12.745499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:12.745587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:12.747550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:12.812497Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:12.814237Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707447503292528:2081] 1768047792467821 != 1768047792467824 2026-01-10T12:23:12.859230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:12.859271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:23:12.859282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:12.859378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:13.009878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:13.026785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:13.173421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:13.179564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:13.262258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.362146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.397915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.435996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.468284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.482651Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:13.494737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.523282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.548300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.573220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:13.596655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.102210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707460388196062:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:15.102216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707460388196073:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:15.102331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:15.102618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707460388196077:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:15.102716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:15.105734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:15.116314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707460388196076:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:15.202620Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707460388196129:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:15.506999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.530254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.552193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.575469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.599330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.622063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
ist { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.429281Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 15ms 2026-01-10T12:23:22.429537Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.429570Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-10T12:23:22.429636Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 17ms 2026-01-10T12:23:22.429803Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.430890Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.430910Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 17ms 2026-01-10T12:23:22.431186Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.431213Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-10T12:23:22.431326Z node 2 :SQS DEBUG: executor.cpp:401: Request [] 
Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 18ms 2026-01-10T12:23:22.431916Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-10T12:23:22.581064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707488234658940:2434]: Pool not found 2026-01-10T12:23:22.581226Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-10T12:23:22.809468Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707488234658969:2439]: Pool not found 2026-01-10T12:23:22.809977Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-10T12:23:22.812154Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707488234659066:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.812234Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.812247Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7593707488234659067:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-10T12:23:22.812484Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707488234659070:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.812551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:22.993673Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7593707488234659064:2453]: Pool not found 2026-01-10T12:23:22.993963Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-10T12:23:23.387636Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707471054787508:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:23.387808Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] Test command err: 2026-01-10T12:23:11.232822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707441326554588:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.232897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.453983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.482005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.482113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.531297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.601260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.739171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:11.744719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744918Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:12.105095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.240660Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event 
for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.314980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.322180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.420782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.530550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.573924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.617968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.647679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.676809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.713171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.744378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.778829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.810028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.027120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454211458097:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454211458087:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454211458102:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.030330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:14.040168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707454211458101:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:14.114566Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707454211458154:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.642355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.671684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.697594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.722296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.749448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.776440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T1 ... 
DEBUG: http_proxy_incoming.cpp:97: (#39,[::1]:40530) incoming connection opened 2026-01-10T12:23:23.161210Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#39,[::1]:40530) -> (POST /Root, 100 bytes) 2026-01-10T12:23:23.161319Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38eb:e950:587c:0:20eb:e950:587c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: c489e205-3da875a2-110f377b-2c44de4b 2026-01-10T12:23:23.161656Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [c489e205-3da875a2-110f377b-2c44de4b] got new request from [38eb:e950:587c:0:20eb:e950:587c:0] database '/Root' stream '' 2026-01-10T12:23:23.162107Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [c489e205-3da875a2-110f377b-2c44de4b] [auth] Authorized successfully 2026-01-10T12:23:23.162139Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [c489e205-3da875a2-110f377b-2c44de4b] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:23.162416Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [c489e205-3da875a2-110f377b-2c44de4b] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-01-10T12:23:23.162482Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [c489e205-3da875a2-110f377b-2c44de4b] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2026-01-10T12:23:23.162570Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#39,[::1]:40530) <- (400 InvalidParameterValue, 81 bytes) 2026-01-10T12:23:23.162627Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#39,[::1]:40530) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":0, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-10T12:23:23.162651Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#39,[::1]:40530) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: c489e205-3da875a2-110f377b-2c44de4b Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-10T12:23:23.162738Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#39,[::1]:40530) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-10T12:23:23.163180Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#39,[::1]:40546) incoming connection opened 2026-01-10T12:23:23.163244Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#39,[::1]:40546) -> (POST /Root, 101 bytes) 2026-01-10T12:23:23.163369Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18ed:4150:587c:0:ed:4150:587c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 385b516e-75a26cf-886919e8-b8f1d80c 2026-01-10T12:23:23.163687Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [385b516e-75a26cf-886919e8-b8f1d80c] got new request from [18ed:4150:587c:0:ed:4150:587c:0] database '/Root' stream '' 2026-01-10T12:23:23.164138Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [385b516e-75a26cf-886919e8-b8f1d80c] [auth] Authorized successfully 2026-01-10T12:23:23.164166Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request 
[ReceiveMessage] requestId [385b516e-75a26cf-886919e8-b8f1d80c] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:23.164466Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [385b516e-75a26cf-886919e8-b8f1d80c] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-01-10T12:23:23.164540Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [385b516e-75a26cf-886919e8-b8f1d80c] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-10T12:23:23.164639Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#39,[::1]:40546) <- (400 InvalidParameterValue, 85 bytes) 2026-01-10T12:23:23.164713Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#39,[::1]:40546) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":50, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-10T12:23:23.164744Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#39,[::1]:40546) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 385b516e-75a26cf-886919e8-b8f1d80c Content-Type: application/x-amz-json-1.1 Content-Length: 85 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-10T12:23:23.164813Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#39,[::1]:40546) connection closed 2026-01-10T12:23:23.165162Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#39,[::1]:40562) incoming connection opened 2026-01-10T12:23:23.165216Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#39,[::1]:40562) -> (POST /Root, 109 bytes) 2026-01-10T12:23:23.165296Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d863:4150:587c:0:c063:4150:587c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: fc9a3bc8-adc3b7c4-479e0c9f-adaaf171 2026-01-10T12:23:23.165574Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [fc9a3bc8-adc3b7c4-479e0c9f-adaaf171] got new request from [d863:4150:587c:0:c063:4150:587c:0] database '/Root' stream '' 2026-01-10T12:23:23.165935Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [fc9a3bc8-adc3b7c4-479e0c9f-adaaf171] [auth] Authorized successfully 2026-01-10T12:23:23.165943Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [fc9a3bc8-adc3b7c4-479e0c9f-adaaf171] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:23.166177Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [fc9a3bc8-adc3b7c4-479e0c9f-adaaf171] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-10T12:23:23.166257Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [fc9a3bc8-adc3b7c4-479e0c9f-adaaf171] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-10T12:23:23.166351Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#39,[::1]:40562) <- (400 InvalidParameterValue, 85 bytes) 2026-01-10T12:23:23.166402Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#39,[::1]:40562) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":2147483647, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-10T12:23:23.166424Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#39,[::1]:40562) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: fc9a3bc8-adc3b7c4-479e0c9f-adaaf171 Content-Type: application/x-amz-json-1.1 Content-Length: 85 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-10T12:23:23.166491Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#39,[::1]:40562) connection closed 2026-01-10T12:23:23.166937Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#39,[::1]:40564) incoming connection opened 2026-01-10T12:23:23.167002Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#39,[::1]:40564) -> (POST /Root, 110 bytes) 2026-01-10T12:23:23.167117Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [3873:4150:587c:0:2073:4150:587c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 198af39a-c450530-5ddd47ce-f123d3d8 2026-01-10T12:23:23.167509Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [198af39a-c450530-5ddd47ce-f123d3d8] got new request from [3873:4150:587c:0:2073:4150:587c:0] database '/Root' stream '' 2026-01-10T12:23:23.167846Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [198af39a-c450530-5ddd47ce-f123d3d8] [auth] Authorized successfully 2026-01-10T12:23:23.167879Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [198af39a-c450530-5ddd47ce-f123d3d8] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:23.168099Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [198af39a-c450530-5ddd47ce-f123d3d8] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-10T12:23:23.168149Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [198af39a-c450530-5ddd47ce-f123d3d8] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2026-01-10T12:23:23.168215Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#39,[::1]:40564) <- (400 InvalidParameterValue, 81 bytes) 2026-01-10T12:23:23.168286Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#39,[::1]:40564) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":-2147483648, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-10T12:23:23.168321Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#39,[::1]:40564) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 198af39a-c450530-5ddd47ce-f123d3d8 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-10T12:23:23.168401Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#39,[::1]:40564) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-10T12:23:23.189213Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:23.189248Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.189271Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:23.189289Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.189307Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:23.289583Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:23.289620Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.289630Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:23.289647Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.289656Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence >> TxUsage::WriteToTopic_Demo_38_Query [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] Test command err: 2026-01-10T12:23:11.232739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707442135522590:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.232788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.479274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.479362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.493654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.524269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.570834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.743736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:11.744546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744688Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:12.113505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.240171Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.305737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.313978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.427980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.544426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.582434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.624585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.654418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.686574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.720755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.748994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.785241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.812810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.026898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707455020426087:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.026898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707455020426092:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707455020426102:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.027390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.030578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:14.040928Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707455020426101:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-10T12:23:14.140856Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707455020426154:3098] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.642055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.668697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.698085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.726583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.754486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.782683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T1 ... [0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:23:23.932687Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:23.932838Z node 2 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1768047803977, TxId 281474976710690 2026-01-10T12:23:23.932867Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:23.932880Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:23:23.932889Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:23.932908Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:23:23.933033Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710690 2026-01-10T12:23:23.933063Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:23:23.933072Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:23:23.933084Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:23.933314Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:23:23.934162Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:23:23.934334Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-10T12:23:23.934352Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-10T12:23:23.934433Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:23:23.934457Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:23.934468Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.934477Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:23.934489Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:23.934498Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:23.934516Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:23.934862Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 
SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-10T12:23:23.934952Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-10T12:23:23.935067Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:23:23.938017Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:3821 { status: SUCCESS, issues: }consumer 2026-01-10T12:23:23.948527Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:36824) incoming connection opened 2026-01-10T12:23:23.948657Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:36824) -> (POST /Root, 1406 bytes) 2026-01-10T12:23:23.948781Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b8ca:fa10:547c:0:a0ca:fa10:547c:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 3c77fedf-48eae8fb-a4b90a2e-97291393 2026-01-10T12:23:23.949609Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [3c77fedf-48eae8fb-a4b90a2e-97291393] got new request from [b8ca:fa10:547c:0:a0ca:fa10:547c:0] database '/Root' stream '' 2026-01-10T12:23:23.949979Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-10T12:23:23.950044Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-10T12:23:23.950073Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-10T12:23:23.950105Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-10T12:23:23.950146Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-10T12:23:23.950179Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-10T12:23:23.950210Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-10T12:23:23.956697Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 
2026-01-10T12:23:23.957216Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.958568Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.959039Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.959534Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.959572Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.959748Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:23.959818Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-10T12:23:23.959921Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [3c77fedf-48eae8fb-a4b90a2e-97291393] [auth] Authorized successfully 2026-01-10T12:23:23.959979Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [3c77fedf-48eae8fb-a4b90a2e-97291393] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:23.960389Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [3c77fedf-48eae8fb-a4b90a2e-97291393] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.TooManyEntriesInBatchRequest 2026-01-10T12:23:23.960523Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [3c77fedf-48eae8fb-a4b90a2e-97291393] reply with status: STATUS_UNDEFINED message: The batch request contains more entries than permissible. 
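The 400 above is the SQS-compatible HTTP proxy rejecting a SendMessageBatch whose Entries list exceeds the batch cap (standard AWS SQS allows at most 10 entries per SendMessageBatch; the exact limit this proxy enforces is not stated in the log). The raw HTTP exchange is captured in the next log entry. Below is a minimal sketch of how a client would trigger the same response: the database path ("POST /Root"), headers and QueueUrl are copied from that capture, while the proxy address/port and the entry count are assumptions made only for illustration.

import json
import requests  # third-party HTTP client, assumed available

PROXY = "http://localhost:8080"  # assumed address of the SQS-compatible HTTP proxy (not in the log)

payload = {
    "QueueUrl": "/v1/5//Root/6/topic1/8/consumer",  # as in the captured request below
    "Entries": [
        {
            "Id": f"Id-{i}",
            "MessageGroupId": f"MessageGroupId-{i}",
            "MessageBody": f"MessageBody-{i}",
        }
        for i in range(1, 12)  # deliberately more entries than one batch may hold
    ],
}

resp = requests.post(
    f"{PROXY}/Root",  # database path, matching "POST /Root" in the capture
    headers={
        "X-Amz-Target": "AmazonSQS.SendMessageBatch",
        "Content-Type": "application/json",
    },
    data=json.dumps(payload),
)
print(resp.status_code)        # 400 expected
print(resp.json()["__type"])   # AWS.SimpleQueueService.TooManyEntriesInBatchRequest expected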
2026-01-10T12:23:23.960690Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:36824) <- (400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest, 134 bytes) 2026-01-10T12:23:23.960780Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:36824) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ { "Id":"Id-1", "MessageGroupId":"MessageGroupId-1", "MessageBody":"MessageBody-1" }, { "Id":"Id-2", "MessageGroupId":"MessageGroupId-2", "MessageBody":"MessageBody-2" }, { "Id":"Id-3", "MessageGroupId":"MessageGroupId-3", "MessageBody":"MessageBody-3" }, { "Id":"Id-4", "MessageGroupId":"MessageGroupId-4", "MessageBody":"MessageBody-4" }, { "Id":"Id-5", "MessageGroupId":"MessageGroupId-5", "MessageBody":"MessageBody-5" }, { "Id":"Id-6", "MessageGroupId":"MessageGroupId-6", "MessageBody":"MessageBody-6" 2026-01-10T12:23:23.960817Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:36824) Response: HTTP/1.1 400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest Connection: close x-amzn-requestid: 3c77fedf-48eae8fb-a4b90a2e-97291393 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-10T12:23:23.960927Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:36824) connection closed Http output full {"__type":"AWS.SimpleQueueService.TooManyEntriesInBatchRequest","message":"The batch request contains more entries than permissible."} 2026-01-10T12:23:24.032507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.032552Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.032565Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.032582Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.032612Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] Test command err: 2026-01-10T12:23:11.234526Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707441717396753:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.234691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.487052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.487160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.513224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.535709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.574697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.576843Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707441717396703:2081] 1768047791228940 != 1768047791228943 2026-01-10T12:23:11.696406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:11.744604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744688Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:12.085064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.092148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:23:12.241408Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.269958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.275300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.344221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.482910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.523603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.560961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.589329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.619144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.648034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.681515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.713366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.752644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.328328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454602300245:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.328336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454602300254:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.328420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.328663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707454602300260:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.328717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.331357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:14.339366Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707454602300259:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:14.404133Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707454602300312:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.806621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.836286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.862963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.890671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.919289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp: ... 832Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.174841Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:23:24.174846Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.174880Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:23:24.174896Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72075186224037907][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:23:24.175244Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:24.175376Z node 2 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1768047804215, TxId 281474976710690 2026-01-10T12:23:24.175395Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.175403Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:23:24.175410Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.175439Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-10T12:23:24.175517Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710690 2026-01-10T12:23:24.175526Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:23:24.175532Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:23:24.175556Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.175748Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:23:24.176666Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:23:24.176854Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-10T12:23:24.176872Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-10T12:23:24.176955Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:23:24.176987Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.176996Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.177004Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.177015Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.177024Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.177036Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:24.177314Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 
SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-10T12:23:24.177373Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-10T12:23:24.177446Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:23:24.178336Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:24016 { status: SUCCESS, issues: }consumer 2026-01-10T12:23:24.195442Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:54414) incoming connection opened 2026-01-10T12:23:24.195556Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:54414) -> (POST /Root, 76 bytes) 2026-01-10T12:23:24.195692Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b80c:2b01:457c:0:a00c:2b01:457c:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 7c5da6a-e62216be-a2cafc44-4eddb3cf 2026-01-10T12:23:24.196008Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [7c5da6a-e62216be-a2cafc44-4eddb3cf] got new request from [b80c:2b01:457c:0:a00c:2b01:457c:0] database '/Root' stream '' 2026-01-10T12:23:24.196409Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-10T12:23:24.196514Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-10T12:23:24.196562Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-10T12:23:24.196584Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-10T12:23:24.196605Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-10T12:23:24.196637Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-10T12:23:24.196680Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-10T12:23:24.202471Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" 
retryable:0 2026-01-10T12:23:24.203661Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:24.204253Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:24.204284Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:24.204474Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:24.205574Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-10T12:23:24.205624Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-10T12:23:24.205713Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-10T12:23:24.205799Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [7c5da6a-e62216be-a2cafc44-4eddb3cf] [auth] Authorized successfully 2026-01-10T12:23:24.205859Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [7c5da6a-e62216be-a2cafc44-4eddb3cf] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:24.206212Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [7c5da6a-e62216be-a2cafc44-4eddb3cf] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.EmptyBatchRequest 2026-01-10T12:23:24.206307Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [7c5da6a-e62216be-a2cafc44-4eddb3cf] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 
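The companion case: the same call with an empty Entries list is answered with AWS.SimpleQueueService.EmptyBatchRequest, as the reply above and the captured request body below ("Entries": [ ]) show. A minimal sketch under the same assumptions as the earlier example (proxy address is assumed; path, headers and QueueUrl come from the capture):

import json
import requests  # third-party HTTP client, assumed available

PROXY = "http://localhost:8080"  # assumed proxy address (not in the log)

resp = requests.post(
    f"{PROXY}/Root",
    headers={
        "X-Amz-Target": "AmazonSQS.SendMessageBatch",
        "Content-Type": "application/json",
    },
    data=json.dumps({
        "QueueUrl": "/v1/5//Root/6/topic1/8/consumer",
        "Entries": [],  # empty batch; the proxy replies 400 and does not retry the gRPC response
    }),
)
print(resp.status_code)        # 400 expected
print(resp.json()["__type"])   # AWS.SimpleQueueService.EmptyBatchRequest expected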
2026-01-10T12:23:24.206440Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:54414) <- (400 AWS.SimpleQueueService.EmptyBatchRequest, 112 bytes) 2026-01-10T12:23:24.206496Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:54414) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ ] } 2026-01-10T12:23:24.206527Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:54414) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: 7c5da6a-e62216be-a2cafc44-4eddb3cf Content-Type: application/x-amz-json-1.1 Content-Length: 112 2026-01-10T12:23:24.206607Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:54414) connection closed Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2026-01-10T12:23:24.274935Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.274969Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.274982Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.275001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.275033Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] >> TxUsage::WriteToTopic_Demo_40_Table >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2026-01-10T12:22:44.870825Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707328019989351:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:44.870974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:45.153048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:45.184777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:45.184896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:45.195184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:45.327550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-10T12:22:45.343160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:45.560795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:45.560833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:45.560840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:45.560909Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:45.885666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:45.902081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:45.972762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:46.148703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:46.449538Z node 1 :PERSQUEUE ERROR: partition_read.cpp:831: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2026-01-10T12:22:46.449647Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 100000, 0 EndOffset 30 2026-01-10T12:22:48.915453Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707345600216351:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:48.915505Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:22:48.944160Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:22:49.053665Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:49.075602Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:22:49.075694Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:22:49.092053Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:22:49.168165Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:22:49.172869Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:22:49.172891Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:22:49.172898Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:22:49.172969Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:22:49.473966Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:22:49.554123Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:22:49.742592Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:22:49.920691Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:22:53.916887Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707345600216351:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:22:53.917039Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:23:04.031885Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:23:04.031917Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:15.982220Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7593707457716655988:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:15.983015Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:15.995553Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:16.088619Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:16.110461Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:16.110551Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:16.138384Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:16.183136Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:16.187944Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:16.187965Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:16.187974Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:16.188051Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:16.413828Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:16.466791Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:16.691241Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:16.988581Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:20.989725Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593707481681533783:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:20.989797Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:21.005185Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:21.111219Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:21.140297Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:21.140404Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:21.150031Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:21.178042Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:21.255602Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:21.255635Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:21.255652Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:21.255752Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:21.481927Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:21.490155Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:21.541712Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:21.780313Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:21.996464Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] Test command err: 2026-01-10T12:23:11.252789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707443181046749:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.252870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.468232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.544726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.544939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.548771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.600423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707443181046720:2081] 1768047791251161 != 1768047791251164 2026-01-10T12:23:11.602409Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.744575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744713Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:11.750511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:12.069044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.259621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.285310Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.291609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:23:12.294479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.322598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:23:12.387922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.514196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.551942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.595436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.626860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.665940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.704206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.737028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.767976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.797546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.174938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707456065950268:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.174964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707456065950260:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.175067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.175413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707456065950275:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.175507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.178793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:14.189307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707456065950274:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:14.247014Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707456065950327:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.642364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.671693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.699305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.728274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.756369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
QUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.046367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.046403Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.046414Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.046430Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.046441Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.143870Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 477 2026-01-10T12:23:24.146684Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.146711Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.146722Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.146737Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.146747Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.247080Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.247115Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.247126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.247143Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.247154Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.347430Z 
node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.347468Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.347479Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.347498Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.347508Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.447731Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.447754Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.447774Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.447787Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.447796Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.548116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.548142Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.548153Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.548169Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.548177Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.648466Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.648495Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.648507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.648524Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.648534Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.748809Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.748841Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.748853Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 
2026-01-10T12:23:24.748883Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.748895Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.849137Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.849160Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.849180Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.849197Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.849207Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:24.949489Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:24.949510Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.949518Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:24.949531Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:24.949540Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.049860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.049882Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.049891Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.049906Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.049915Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.144247Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 
MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 70 2026-01-10T12:23:25.144417Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [63586b1c-87296b27-4812f24-bb167797] reply ok 2026-01-10T12:23:25.144579Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:53048) <- (200 , 2 bytes) 2026-01-10T12:23:25.144704Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:53048) connection closed Http output full {} 2026-01-10T12:23:25.150223Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.150245Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.150253Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.150263Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.150269Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.5%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:22:59.866132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:22:59.866242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:22:59.866334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:22:59.866412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:22:59.866454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:22:59.866520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:22:59.866619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:22:59.867535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:22:59.868205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:22:59.961263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:59.961379Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:59.973863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:22:59.973967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:22:59.974141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:22:59.983075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:22:59.983332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:22:59.984124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2026-01-10T12:22:59.984366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:22:59.988986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.989573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:22:59.995404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:22:59.995609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:22:59.995672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:22:59.995732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:22:59.996628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:00.156595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.157995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:00.158681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 23:24.520799Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:23:24.520814Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:23:24.520938Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521016Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521092Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521234Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521333Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521567Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521651Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.521965Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522046Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522203Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522322Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522364Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522434Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522631Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522707Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.522991Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523180Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523240Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523292Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523399Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523446Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.523495Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:23:24.527794Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:24.530793Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:24.530849Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:24.531496Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:24.531545Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:24.531583Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:24.533018Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:783:2751] sender: [5:841:2058] recipient: [5:15:2062] 
2026-01-10T12:23:24.570885Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:24.570946Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-10T12:23:24.625344Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-10T12:23:24.625448Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:24.625484Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:24.625679Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:24.625721Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:832:2789], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-10T12:23:24.626172Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 0 2026-01-10T12:23:26.627034Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-10T12:23:26.632402Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-10T12:23:26.640114Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-10T12:23:26.640547Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-10T12:23:26.641214Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:26.641464Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 309us result status StatusSuccess 2026-01-10T12:23:26.642087Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv4lPiwAAGwgR2EbvaFcz\nKbzW2Wb2G3O04SI7Ax/r4npwncDM8wwr4aDiHhDaAZtsbCA11O30LIZ7JzsyOoaR\nSgXYO2wyTfp5PwwOyDGItZAz0zeDPxnBkQHHZShWg/Pb2RRjHJ+2HY5BAkdsu8Y0\nWAH7nQfq89IdAKcqJFFG19zQNQqjTmyDjPDuz06tJ6A8Lu7MIZyrh+nALUXlS0ws\nFHhuSx9rU6GIuNsuaNHpohxDP698lt6QvF7nBO3mCVZxAfrbYL0Cq7mubELrBBwj\nw3whCDrheCEIxit9zedwYr6RkNs+rkxcMxVFX3egEsBDR1+9gSoGPb6slTWqpAdi\n9QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134201960 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuaFYyo4ijRBVwXsl/OQ7\n7IObTQJJAzCSPNvcbSSqBC+7BaD44f6fx8i3pDZ/28tTzd12aJ+EbPPQtVU5A79P\nNguZYnBI8slEdVsI+mZLb2o3vu8f8ja6NZ6MU2uKeuX4xSv/f4gHtmyNXDPsVfAJ\nBF4JFAaYvx54WN9YThuFKB03vv/zYo8DKzmjSBzmZSIUSJQsy86Fv23DjbPgS3Kd\n1YLub92p8XDIYGY1mOEmuNuTKrUGlwY0PVo9qQsnUm0ipyResWu6AAN7vCaOCvkd\ncHA2dT+LOJxex25rx0Y2edVvOM7H9+LZO70gn42lAkVDTdDjPM/VaxDcqIcaoDPv\ngwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134202408 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAopRXnB93CeYECHNJLUtQ\nPB+bR8Xav3qjT/Lt45TuYCPvvBRSblew5TY+lnf22wQhR/Ylga1FZa1SoQUVc+Ji\ntPZbbjSKkvfOKkgjBnNqLyWsE/2ycSIsSts3gOUiYzbGTKvlsMJ45w5p+zUQ00Rl\nxOqzF78DCL7H2F2az/44rRxFaEWIf1acCvnqEKgXsY85pDBwyV53bGKmw/HyeheJ\nZRUOU0Mbnm8DQ7crFMEv2Zk8KzUTsWYJVnlgi6KoBXoirc99YzChsYEz2eIuDY59\nWdBfEb0A3mIIKCBha/um6NhclvXTzi+reqfkHPLJolx1C/MMpp+hDTH7Beg13ijx\nrwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1768134204622 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TContinuousBackupTests::TakeIncrementalBackup >> TContinuousBackupTests::Basic |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse |93.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes >> TestSetCloudPermissions::CanSetPermissionsForRootDb >> TestSetCloudPermissions::CanSetAllPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] Test command err: 2026-01-10T12:23:11.232823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707440367424335:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:11.233411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:11.456528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:11.466797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:11.466887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:11.476707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:11.621868Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:11.727312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:11.744503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:11.744542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:11.744634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:11.744732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:12.106568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:12.239361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:12.281040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:12.287894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:12.375564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.461911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.498767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.540652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.570169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.603029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.633445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.663425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.694907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:12.726406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.042092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707453252327846:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.042125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707453252327837:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.042242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.042540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707453252327852:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.042655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:14.045532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:14.055213Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707453252327851:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:14.116236Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707453252327904:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:14.642339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.673400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.701409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.731127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.761534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.787384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T1 ... 
n and tx pending commits 2026-01-10T12:23:25.150941Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.150951Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.251350Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.251385Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.251395Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.251412Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.251421Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.351659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.351687Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.351695Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.351773Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.351783Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.452032Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.452063Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.452088Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.452101Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.452108Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.552353Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.552390Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.552398Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.552410Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.552418Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.652710Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and 
tx events 2026-01-10T12:23:25.652766Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.652780Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.652797Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.652807Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.753076Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.753112Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.753124Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.753144Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.753154Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.853384Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.853421Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.853433Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.853449Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.853470Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:25.953753Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:25.953787Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.953798Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:25.953815Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:25.953825Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:26.054134Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:26.054163Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:26.054176Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:26.054193Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 
0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:26.054202Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:26.146257Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 1 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 37 2026-01-10T12:23:26.146967Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-10T12:23:26.146992Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-10T12:23:26.147074Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 6 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-10T12:23:26.147223Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 6 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:23:26.147244Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 6. Send blob request. 2026-01-10T12:23:26.147279Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 3 times before, last time 2026-01-10T12:23:23.000000Z 2026-01-10T12:23:26.147304Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 6. All 1 blobs are from cache. 2026-01-10T12:23:26.147323Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:26.147352Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:23:26.147389Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-10T12:23:26.147455Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:26.148091Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [60740ece-cf115bd3-ffcae07f-9c7951b8] reply ok 2026-01-10T12:23:26.148204Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46796) <- (200 , 211 bytes) 2026-01-10T12:23:26.148306Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46796) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1768047803178"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} jsonReceived = { "Messages": [ { "Attributes": { "SentTimestamp":"1768047803178" }, "Body":"MessageBody-0", "MD5OfBody":"94a29778a1f1f41bf68142847b2e6106", "MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125", "ReceiptHandle":"CAAQAA==" } ] } 2026-01-10T12:23:26.154455Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:26.154484Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:26.154494Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:26.154511Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:26.154520Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.5%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_47_Query [GOOD] |93.6%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table [GOOD] |93.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query [GOOD] |93.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> TxUsage::Write_And_Read_Small_Messages_1 >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table >> TSchemeShardAuditSettings::AlterSubdomain >> TSchemeShardAuditSettings::CreateSubdomain |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::CreateExtSubdomain >> TContinuousBackupTests::Basic [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2026-01-10T12:22:18.585787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:18.585851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:18.586602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:18.601920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:18.602371Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:18.602639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:18.653297Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:18.663328Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:18.664010Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:18.665914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:18.666041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:18.666102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:18.666551Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:18.666742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:18.666821Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:22:18.753123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:18.788235Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:18.788452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:18.788563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:22:18.788606Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:18.788673Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:18.788722Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.789008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.789083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.789458Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:18.789571Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:18.789646Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.789699Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:18.789737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:18.789786Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:18.789842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:18.789919Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:18.789973Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:18.790082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.790121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.790198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:22:18.797907Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, 
Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:18.798008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:18.798121Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:18.798340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:18.798430Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:18.798520Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:18.798590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:18.798634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:18.798672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:18.798712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.799044Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:18.799112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:18.799161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:18.799200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.799254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:18.799311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:18.799353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:18.799388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.799417Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:18.811841Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:18.811929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:18.811966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:18.812041Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: 
Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:18.812179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:18.812779Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.812842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:18.812916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:22:18.813081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:18.813120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:18.813285Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:18.813332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:18.813373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:18.813420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:18.817518Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:18.817602Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:18.817861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.817909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:18.817976Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:18.818023Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:18.818065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:18.818113Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:18.818158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ent [32:350:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-10T12:23:29.161052Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161074Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-10T12:23:29.161126Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-10T12:23:29.161151Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161172Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-10T12:23:29.161226Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-10T12:23:29.161248Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161267Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-10T12:23:29.161321Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-10T12:23:29.161344Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161371Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-10T12:23:29.161431Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-10T12:23:29.161452Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161472Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-10T12:23:29.161525Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-10T12:23:29.161545Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161566Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-10T12:23:29.161621Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-10T12:23:29.161644Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161664Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-10T12:23:29.161731Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-10T12:23:29.161753Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161775Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-10T12:23:29.161837Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-10T12:23:29.161858Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161877Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-10T12:23:29.161929Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-10T12:23:29.161949Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.161969Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-10T12:23:29.162019Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-10T12:23:29.162040Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162062Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-10T12:23:29.162141Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-10T12:23:29.162180Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162205Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-10T12:23:29.162268Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-10T12:23:29.162291Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162314Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-10T12:23:29.162367Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-10T12:23:29.162389Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162407Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-10T12:23:29.162472Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-10T12:23:29.162491Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162511Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-10T12:23:29.162565Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-10T12:23:29.162586Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162606Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-10T12:23:29.162660Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-10T12:23:29.162680Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162700Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-10T12:23:29.162768Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-10T12:23:29.162793Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162817Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-10T12:23:29.162886Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-10T12:23:29.162909Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.162932Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-10T12:23:29.162993Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:23:29.163016Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.163037Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 31 23 26 27 29 29 27 25 24 25 23 26 31 27 26 31 31 16 18 17 17 16 29 18 - 18 - - - - - - actual 31 23 26 27 29 29 27 25 24 25 23 26 31 27 26 31 31 16 18 17 17 16 29 18 - 18 - - - - - - interm 2 1 1 0 5 3 3 5 2 5 - - - - 5 6 3 6 3 5 5 - - 3 - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2026-01-10T12:22:19.740537Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:19.741625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:19.741674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:19.752675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:19.753025Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:19.753328Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:19.802962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:19.812749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:19.813375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:19.815083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:19.815160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:19.815227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:19.815604Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:19.815683Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:19.815766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 
2026-01-10T12:22:19.904787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:19.926057Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:19.926213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:19.926369Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:19.926412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:19.926444Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:19.926474Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:19.926638Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:19.926670Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:19.926912Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:19.926996Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:19.927034Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:19.927073Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:19.927098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:19.927129Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:19.927167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:19.927190Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:19.927217Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:19.927286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:19.927309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:19.927349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:19.932464Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 
Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:19.932512Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:19.932585Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:19.932740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:19.932788Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:19.932832Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:19.932875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:19.932906Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:19.932930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:19.932952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:19.933207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:19.933236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:19.933268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:19.933302Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:19.933349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:19.933378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:19.933409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:19.933438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:19.933460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:19.945386Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:19.945443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:19.945466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:19.945510Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:19.945563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:19.946051Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:19.946109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:19.946154Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:19.946233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:19.946264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:19.946368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:19.946409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:19.946436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:19.946462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:19.949173Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:19.949233Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:19.949412Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:19.949439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:19.949482Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:19.949514Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:19.949539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:19.949573Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:19.949601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
[32:350:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-10T12:23:29.008550Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.008582Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-10T12:23:29.008674Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-10T12:23:29.008709Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.008742Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-10T12:23:29.008837Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-10T12:23:29.008872Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.008904Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-10T12:23:29.008991Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-10T12:23:29.009026Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009060Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-10T12:23:29.009152Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-10T12:23:29.009189Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009221Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-10T12:23:29.009331Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-10T12:23:29.009380Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009418Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-10T12:23:29.009557Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], 
Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-10T12:23:29.009594Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009644Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-10T12:23:29.009711Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-10T12:23:29.009745Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009780Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-10T12:23:29.009873Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-10T12:23:29.009907Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.009945Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-10T12:23:29.010055Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-10T12:23:29.010091Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010123Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-10T12:23:29.010214Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-10T12:23:29.010246Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010280Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-10T12:23:29.010370Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-10T12:23:29.010405Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010439Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-10T12:23:29.010532Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-10T12:23:29.010572Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010606Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-10T12:23:29.010699Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-10T12:23:29.010733Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010765Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-10T12:23:29.010854Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-10T12:23:29.010886Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.010919Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-10T12:23:29.011013Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-10T12:23:29.011063Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.011110Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-10T12:23:29.011214Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-10T12:23:29.011251Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.011284Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-10T12:23:29.011379Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-10T12:23:29.011414Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.011448Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-10T12:23:29.011545Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-10T12:23:29.011580Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.011611Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-10T12:23:29.011719Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:239:2231], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:23:29.011754Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:29.011789Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 31 28 29 26 29 29 28 14 29 28 28 28 27 27 29 29 27 29 28 29 28 29 28 29 5 29 25 29 9 - - - actual 31 28 29 26 29 29 28 14 29 28 28 28 27 27 29 29 27 29 28 29 28 29 28 29 5 29 25 29 9 - - - interm 5 - 5 0 6 - 4 2 5 3 2 1 2 6 - - 3 3 5 0 - 0 3 5 5 - - 3 - - - - >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:28.268241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:28.268350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:28.268395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:28.268431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:28.268467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:28.268517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:28.268588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:28.268676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-10T12:23:28.269549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:28.270892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:28.357682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:28.357740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.374989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:28.375100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:28.377031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:28.386283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:28.387506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:28.390158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:28.393155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:28.399138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:28.400170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:28.412165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:28.412235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:28.412314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:28.412360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:28.412470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:28.412740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:28.556680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.558843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.558996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.559755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
RD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-10T12:23:29.842094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-10T12:23:29.842287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-10T12:23:29.842430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } CommitVersion { Step: 5000041 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-10T12:23:29.843428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:23:29.843479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-10T12:23:29.843648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:23:29.843721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:23:29.843916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 4294970002 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-10T12:23:29.844030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:29.844070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 
2026-01-10T12:23:29.844153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:23:29.844203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-10T12:23:29.847207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:23:29.847628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:23:29.847801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:23:29.847869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:23:29.848000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:23:29.848045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:23:29.848117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-10T12:23:29.848175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:23:29.848231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-10T12:23:29.848316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:748:2726] message: TxId: 104 2026-01-10T12:23:29.848371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-10T12:23:29.848427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:23:29.848464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:23:29.848611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:23:29.848665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-10T12:23:29.848688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-10T12:23:29.848737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:23:29.848762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-10T12:23:29.848787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-10T12:23:29.848859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:23:29.849524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:29.849577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:23:29.849650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:23:29.849701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:23:29.849737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:23:29.852024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:23:29.852120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1102:3015] 2026-01-10T12:23:29.852228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-10T12:23:29.852987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:23:29.853263Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 286us result status StatusPathDoesNotExist 2026-01-10T12:23:29.853482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:23:29.854085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2026-01-10T12:23:29.854299Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 255us result status StatusPathDoesNotExist 2026-01-10T12:23:29.854437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] Test command err: 2026-01-10T12:23:14.844047Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707456211490809:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:14.844543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:15.045394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:15.045499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:15.081850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:15.094051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:15.135999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:15.137891Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707456211490702:2081] 1768047794836963 != 1768047794836966 2026-01-10T12:23:15.181299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:15.181322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:15.181337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-10T12:23:15.181418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:15.297142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:15.321440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:15.447696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:15.453084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:15.534123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.662752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.703026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.739746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.767728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.795708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.823468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.845454Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:15.848950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.879691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:15.907849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.270648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707469096394239:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:17.270648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707469096394247:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:17.270774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:17.270998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707469096394254:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:17.271113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:17.274438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:17.282440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707469096394253:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-10T12:23:17.336805Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707469096394306:3101] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:17.715559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.747840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.782695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.819159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.854867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:17.890060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 10T12:23:29.077498Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 74. All 1 blobs are from cache. 2026-01-10T12:23:29.077531Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:29.077544Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 280 partno 0 count 10 parts 0 suffix '63' 2026-01-10T12:23:29.077601Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 280 totakecount 10 count 10 size 356 from pos 4 cbcount 10 2026-01-10T12:23:29.077699Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:29.078606Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [d6dac104-5a4f4a5e-f9028e29-3c2189de] reply ok 2026-01-10T12:23:29.078762Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:50122) <- (200 , 1208 bytes) 2026-01-10T12:23:29.078850Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:50122) connection closed Http output full {"Messages":[{"MD5OfBody":"fdef397828e382cbd826fa29c1de10d4","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQnAI=","Body":"MessageBody-284","MessageId":"CAF243AE-308A-561C-BF17-1659068910C2"},{"MD5OfBody":"c5ab62c2c62758bcdc00575221cf95d1","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQnQI=","Body":"MessageBody-285","MessageId":"FF425FA9-673C-534C-8CC4-FB08061A4BDB"},{"MD5OfBody":"87ca2c7863c07d0cebeff0918336b083","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQngI=","Body":"MessageBody-286","MessageId":"6C768E3E-5676-5C06-90CC-1BDF44D7061D"},{"MD5OfBody":"430252778030489a7c3d8ca3838d7104","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQnwI=","Body":"MessageBody-287","MessageId":"36308770-7F20-506A-BDDC-9CF75E1C45C4"},{"MD5OfBody":"6cb708942d5ce9a51debb4666964ce37","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQoAI=","Body":"MessageBody-288","MessageId":"D27028A6-C515-5C13-AB3A-DA59700C77F6"},{"MD5OfBody":"7fa6a836149b1c7d27cc8d77b658df0b","Attributes":{"SentTimestamp":"1768047808803"},"ReceiptHandle":"CAAQoQI=","Body":"MessageBody-289","MessageId":"5EDB906A-FC22-50FE-8DFD-BD2858742EA9"}]} 2026-01-10T12:23:29.079560Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:50126) incoming connection opened 2026-01-10T12:23:29.079643Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:50126) -> (POST /Root, 100 bytes) 2026-01-10T12:23:29.079762Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [5827:78c3:47c:0:4027:78c3:47c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 357b4be1-ed8bfc24-491156c0-fb4585ef 2026-01-10T12:23:29.080169Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [357b4be1-ed8bfc24-491156c0-fb4585ef] got new request from [5827:78c3:47c:0:4027:78c3:47c:0] database '/Root' stream '' 2026-01-10T12:23:29.080552Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [357b4be1-ed8bfc24-491156c0-fb4585ef] [auth] Authorized successfully 2026-01-10T12:23:29.080584Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [357b4be1-ed8bfc24-491156c0-fb4585ef] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:29.081126Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1768047814080 VisibilityDeadlineMilliseconds: 1768047839080 MaxNumberOfMessages: 7 2026-01-10T12:23:29.081969Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 
'topic1' requestId: 2026-01-10T12:23:29.081997Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-10T12:23:29.082074Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 75 Topic 'topic1' partition 0 user consumer offset 290 partno 0 count 7 size 26214400 endOffset 300 max time lag 0ms effective offset 290 2026-01-10T12:23:29.082235Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 75 added 1 blobs, size 377 count 10 last offset 290, current partition end offset: 300 2026-01-10T12:23:29.082255Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 75. Send blob request. 2026-01-10T12:23:29.082309Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 1 times before, last time 2026-01-10T12:23:28.000000Z 2026-01-10T12:23:29.082341Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 75. All 1 blobs are from cache. 2026-01-10T12:23:29.082373Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:29.082387Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2026-01-10T12:23:29.082449Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 0 cbcount 10 2026-01-10T12:23:29.082554Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:29.083533Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [357b4be1-ed8bfc24-491156c0-fb4585ef] reply ok 2026-01-10T12:23:29.083690Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:50126) <- (200 , 1407 bytes) 2026-01-10T12:23:29.083806Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:50126) connection closed Http output full 
{"Messages":[{"MD5OfBody":"4ebde9bfeb2c142908b6897295e27d7d","Attributes":{"SentTimestamp":"1768047808809"},"ReceiptHandle":"CAAQogI=","Body":"MessageBody-290","MessageId":"B665CFF5-0D15-5E6E-B795-7F7F031BADFC"},{"MD5OfBody":"00716a52e19ced3758e9add7738a4de6","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQowI=","Body":"MessageBody-291","MessageId":"686D0DEA-9B4B-5B7D-A517-ECDB47DF33C7"},{"MD5OfBody":"6b2cce807faa840ccd5a8f944df80bad","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQpAI=","Body":"MessageBody-292","MessageId":"A9887F98-B0B1-5F36-BEA7-95EBF1DD25E9"},{"MD5OfBody":"c59fd6ecc9a283019c9179d342110fcb","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQpQI=","Body":"MessageBody-293","MessageId":"DCE69C82-8CDA-5A5D-91F0-0AF6FFD574C6"},{"MD5OfBody":"000dd65dc815f7e13c7ab8922f0418be","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQpgI=","Body":"MessageBody-294","MessageId":"46CD018C-4816-5E0E-9483-13F4A15BAB58"},{"MD5OfBody":"042479648840e5a3c4e86196590acb75","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQpwI=","Body":"MessageBody-295","MessageId":"FAE15508-B62A-5219-9518-9937762A66B2"},{"MD5OfBody":"a11e0f7a28004b695b04e3899672981b","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQqAI=","Body":"MessageBody-296","MessageId":"95BA32E8-4312-5A6C-85D8-3477673707AA"}]} 2026-01-10T12:23:29.084616Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:50134) incoming connection opened 2026-01-10T12:23:29.084690Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:50134) -> (POST /Root, 100 bytes) 2026-01-10T12:23:29.084796Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7860:76c3:47c:0:6060:76c3:47c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: ca0a67f4-5c2cb0bd-131224b1-f9c63640 2026-01-10T12:23:29.085107Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [ca0a67f4-5c2cb0bd-131224b1-f9c63640] got new request from [7860:76c3:47c:0:6060:76c3:47c:0] database '/Root' stream '' 2026-01-10T12:23:29.085479Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [ca0a67f4-5c2cb0bd-131224b1-f9c63640] [auth] Authorized successfully 2026-01-10T12:23:29.085485Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [ca0a67f4-5c2cb0bd-131224b1-f9c63640] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:29.086026Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1768047814085 VisibilityDeadlineMilliseconds: 1768047839085 MaxNumberOfMessages: 8 2026-01-10T12:23:29.086922Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-10T12:23:29.086948Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-10T12:23:29.087022Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 76 Topic 'topic1' partition 0 user consumer offset 297 partno 0 count 3 size 26214400 endOffset 300 max time lag 0ms effective offset 297 2026-01-10T12:23:29.087175Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 76 added 1 blobs, size 0 count 3 
last offset 290, current partition end offset: 300 2026-01-10T12:23:29.087195Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 76. Send blob request. 2026-01-10T12:23:29.087244Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 2 times before, last time 2026-01-10T12:23:29.000000Z 2026-01-10T12:23:29.087274Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 76. All 1 blobs are from cache. 2026-01-10T12:23:29.087307Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:29.087340Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2026-01-10T12:23:29.087381Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 7 cbcount 10 2026-01-10T12:23:29.087458Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:29.088218Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [ca0a67f4-5c2cb0bd-131224b1-f9c63640] reply ok 2026-01-10T12:23:29.088338Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:50134) <- (200 , 611 bytes) 2026-01-10T12:23:29.088432Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:50134) connection closed Http output full {"Messages":[{"MD5OfBody":"ed83ae0894ecf1dcca98701cefa96b63","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQqQI=","Body":"MessageBody-297","MessageId":"A9F7CE8B-9B04-59FC-8942-BC1375C9CABC"},{"MD5OfBody":"1199cbe1edbbd44c325cfce6309d033e","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQqgI=","Body":"MessageBody-298","MessageId":"42A487A9-840A-5640-8D1B-996F67C88717"},{"MD5OfBody":"5b9e997bca262b61080f0ec85590ea89","Attributes":{"SentTimestamp":"1768047808810"},"ReceiptHandle":"CAAQqwI=","Body":"MessageBody-299","MessageId":"ABB10D4F-48AA-55E2-BEC7-BD08AF90AFDC"}]} batchSizesHistogram (9): 1: 6 2: 7 3: 8 4: 7 5: 7 6: 7 7: 7 8: 6 9: 6 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestGetWithMustRestoreFirst |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] Test command err: 2026-01-10T12:23:28.085414Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707515210985348:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:28.085480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:28.352823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:28.375974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:28.376059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:28.384195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:28.444467Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707515210985301:2081] 1768047808078402 != 1768047808078405 2026-01-10T12:23:28.456873Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.487971Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-10T12:23:28.488027Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c77f06f07d0] Connect to grpc://localhost:23559 2026-01-10T12:23:28.491680Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c77f06f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-10T12:23:28.508206Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c77f06f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:23:28.509534Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:23:28.509741Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-01-10T12:23:28.526092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> ReadOnlyVDisk::TestWrites >> ReadOnlyVDisk::TestGarbageCollect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] 
Test command err: 2026-01-10T12:23:28.085894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707515679723562:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:28.085945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:28.354520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:28.354653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:28.390182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:28.402993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:28.435141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.436496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707515679723511:2081] 1768047808078353 != 1768047808078356 2026-01-10T12:23:28.508839Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.clusters.get ydb.clusters.manage ydb.clusters.monitor) 2026-01-10T12:23:28.508939Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d29eb0f07d0] Connect to grpc://localhost:10479 2026-01-10T12:23:28.513340Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d29eb0f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.monitor" } } result_filter: ALL_FAILED } 2026-01-10T12:23:28.525425Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d29eb0f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:23:28.525874Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:23:28.526022Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-01-10T12:23:28.542151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] Test command err: 2026-01-10T12:23:28.085051Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707517254213274:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:28.085746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:28.372849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:28.372963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:28.408152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:28.410064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707517254213225:2081] 1768047808078311 != 1768047808078314 2026-01-10T12:23:28.422044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:28.423292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.493817Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.developerApi.update ydb.tables.write ydb.databases.create ydb.databases.connect ydb.developerApi.get ydb.tables.select) 2026-01-10T12:23:28.493901Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce27e4003d0] Connect to grpc://localhost:9404 2026-01-10T12:23:28.498090Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce27e4003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-10T12:23:28.509329Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce27e4003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:23:28.509667Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:23:28.509786Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-01-10T12:23:28.633922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> 
TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] Test command err: 2026-01-10T12:23:28.085883Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707514824188653:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:28.085997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:28.338306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:28.360400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:28.360538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:28.365413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:28.410487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.410565Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707514824188602:2081] 1768047808078292 != 1768047808078295 2026-01-10T12:23:28.487453Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-10T12:23:28.487532Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3add2003d0] Connect to grpc://localhost:24679 2026-01-10T12:23:28.491293Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3add2003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.databases.l...(truncated) } 2026-01-10T12:23:28.508196Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3add2003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:23:28.509184Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:23:28.509307Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root, user: user1@as, from ip: 2026-01-10T12:23:28.581977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.6%| [TM] 
{BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:30.567599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:30.567708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:30.567792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:30.567857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:30.567892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:30.567974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.568104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:30.569079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:30.577619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:30.650144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:30.650203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:30.663988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:30.664091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:30.666540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:30.676616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:30.676902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:30.681877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:30.685344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:30.691803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.692827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:30.699953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:30.700125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:30.700307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:30.701093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.842115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.843853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844204Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... pp:665: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2026-01-10T12:23:31.678541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-10T12:23:31.678996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:31.679092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:31.679145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-10T12:23:31.679194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:31.679225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:23:31.679355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 130 2026-01-10T12:23:31.679542Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:31.679602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:23:31.680824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.682152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2026-01-10T12:23:31.683162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:31.683202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:31.683349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:23:31.683491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:31.683538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-10T12:23:31.683590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-10T12:23:31.683991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-10T12:23:31.684036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2026-01-10T12:23:31.684151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-10T12:23:31.684209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.684249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-10T12:23:31.684276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.684309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2026-01-10T12:23:31.684353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.684381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-10T12:23:31.684409Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-10T12:23:31.684473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:23:31.684506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2026-01-10T12:23:31.684535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 30 2026-01-10T12:23:31.684562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-10T12:23:31.685139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.685228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.685274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:23:31.685309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-10T12:23:31.685344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:31.686219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.686309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.686345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:23:31.686385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-10T12:23:31.686423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:23:31.686497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-10T12:23:31.687204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:31.687268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:23:31.687355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:23:31.687617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:31.687654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:23:31.687712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:31.689601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.691580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.691667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:31.691731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-10T12:23:31.692112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-10T12:23:31.692152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-10T12:23:31.692695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-10T12:23:31.692793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:23:31.692826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1073:3063] TestWaitNotification: OK eventTxId 112 |93.6%| [TA] $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] Test command err: 2026-01-10T12:23:13.607978Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707448818524246:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:13.612997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:13.814320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:13.814436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:13.860525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:13.866132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:13.918060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:13.919265Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707448818524215:2081] 1768047793605295 != 1768047793605298 2026-01-10T12:23:13.964729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:13.964753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:13.964760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:13.964873Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:14.104727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:14.167504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:14.322065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:23:14.326880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:14.385926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.470974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.504909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.542770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.571078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.601557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.617615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:14.630030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.659165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.691188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:14.720220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.370344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707461703427753:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:16.370344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707461703427761:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:16.370422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:16.370611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707461703427768:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:16.370640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:16.374580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:16.384194Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707461703427767:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-10T12:23:16.473719Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707461703427820:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:16.802094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.825442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.848512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.876449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.902372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:16.927829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... fset 0 2026-01-10T12:23:30.841459Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:23:30.841458Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:23:30.841467Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 4. Send blob request. 
2026-01-10T12:23:30.841497Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:30.841512Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:47630) <- (200 , 127 bytes) 2026-01-10T12:23:30.841604Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-10T12:23:30.841628Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:47630) connection closed 2026-01-10T12:23:30.841676Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 1 times before, last time 2026-01-10T12:23:30.000000Z 2026-01-10T12:23:30.841694Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. Http output full {"SequenceNumber":"0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"} 2026-01-10T12:23:30.841720Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1768047810837 queuesize 0 startOffset 0 2026-01-10T12:23:30.841754Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:23:30.841757Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:30.841795Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-10T12:23:30.841842Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:30.842371Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:47636) incoming connection opened 2026-01-10T12:23:30.842433Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:47636) -> (POST /Root, 74 bytes) 2026-01-10T12:23:30.842589Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [78a7:19f4:127c:0:60a7:19f4:127c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: c1fe4483-141dc305-ec72af34-db214938 2026-01-10T12:23:30.842981Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [c1fe4483-141dc305-ec72af34-db214938] got new request from [78a7:19f4:127c:0:60a7:19f4:127c:0] database '/Root' stream '' 2026-01-10T12:23:30.843366Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [c1fe4483-141dc305-ec72af34-db214938] [auth] Authorized successfully 2026-01-10T12:23:30.843399Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [c1fe4483-141dc305-ec72af34-db214938] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:30.844298Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1768047830844 VisibilityDeadlineMilliseconds: 1768047840844 MaxNumberOfMessages: 1 2026-01-10T12:23:30.845110Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-10T12:23:30.845129Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-10T12:23:30.845180Z node 3 :PERSQUEUE DEBUG: 
partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-10T12:23:30.845308Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-10T12:23:30.845325Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-10T12:23:30.845355Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 2 times before, last time 2026-01-10T12:23:30.000000Z 2026-01-10T12:23:30.845373Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-10T12:23:30.845409Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-10T12:23:30.845431Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-10T12:23:30.845470Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-10T12:23:30.845517Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-10T12:23:30.846282Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [c1fe4483-141dc305-ec72af34-db214938] reply ok 2026-01-10T12:23:30.846404Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:47636) <- (200 , 211 bytes) 2026-01-10T12:23:30.846499Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:47636) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1768047810837"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} 2026-01-10T12:23:30.847186Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:47650) incoming connection opened 2026-01-10T12:23:30.847251Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:47650) -> (POST /Root, 80 bytes) 2026-01-10T12:23:30.847377Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d803:1bf4:127c:0:c003:1bf4:127c:0] request [DeleteMessage] url [/Root] database [/Root] requestId: 381395e5-e5c3e53d-80dc97e0-46356664 2026-01-10T12:23:30.847709Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessage] requestId [381395e5-e5c3e53d-80dc97e0-46356664] got new request from [d803:1bf4:127c:0:c003:1bf4:127c:0] database '/Root' stream '' 2026-01-10T12:23:30.848069Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessage] requestId [381395e5-e5c3e53d-80dc97e0-46356664] [auth] Authorized successfully 2026-01-10T12:23:30.848097Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessage] requestId [381395e5-e5c3e53d-80dc97e0-46356664] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-10T12:23:30.848756Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 0 2026-01-10T12:23:30.849612Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:30.849640Z node 3 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-10T12:23:30.849662Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-10T12:23:30.849672Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:30.849686Z node 3 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-10T12:23:30.849724Z node 3 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 1 (startOffset 0) session 2026-01-10T12:23:30.849735Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-10T12:23:30.849745Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-10T12:23:30.849758Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:30.849907Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:23:30.850384Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessage] requestId [381395e5-e5c3e53d-80dc97e0-46356664] reply ok 2026-01-10T12:23:30.850385Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:23:30.850431Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 1 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-10T12:23:30.850465Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:23:30.850488Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:30.850488Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:47650) <- (200 , 2 bytes) 2026-01-10T12:23:30.850499Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:30.850508Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:30.850519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:30.850529Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-10T12:23:30.850549Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:30.850575Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:47650) connection closed Http output full {} 
2026-01-10T12:23:30.910034Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:30.910061Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:30.910068Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:30.910082Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:30.910095Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:129:2154] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:30.567597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:30.567682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:30.567744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:30.567797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:30.567841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:30.567886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:30.568701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:30.577631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:30.649717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:30.649780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-10T12:23:30.660539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:30.664056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:30.666536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:30.675434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:30.676914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:30.681881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:30.685375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:30.693047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.693215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:30.700046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:30.700288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:30.700405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:30.701129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.859333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.860782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-10T12:23:31.641147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:31.641252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:31.641301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-10T12:23:31.641379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:31.641409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:23:31.641442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 134 2026-01-10T12:23:31.642543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.643917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.645064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-10T12:23:31.645119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:23:31.645236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 134 -> 135 2026-01-10T12:23:31.645424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:31.645488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 FAKE_COORDINATOR: Erasing txId 112 2026-01-10T12:23:31.647143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:31.647188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:31.647334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 
43] 2026-01-10T12:23:31.647461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:31.647501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-10T12:23:31.647554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-10T12:23:31.647908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-10T12:23:31.647954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 112:0 ProgressState 2026-01-10T12:23:31.647988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 135 -> 240 2026-01-10T12:23:31.648956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.649049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.649081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:23:31.649117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-10T12:23:31.649152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:31.649756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.649833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-10T12:23:31.649862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-10T12:23:31.649921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-10T12:23:31.649959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:23:31.650026Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2026-01-10T12:23:31.652439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-10T12:23:31.652499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 112:0 ProgressState 2026-01-10T12:23:31.652585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-10T12:23:31.652614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.652649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-10T12:23:31.652674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.652710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2026-01-10T12:23:31.652747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-10T12:23:31.652797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-10T12:23:31.652826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-10T12:23:31.652886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:23:31.653481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:31.653532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:23:31.653606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:23:31.653835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:31.653870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:23:31.653928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:31.654764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.655153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-10T12:23:31.657349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:31.657447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-10T12:23:31.657820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-10T12:23:31.657856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-10T12:23:31.658403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-10T12:23:31.658485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:23:31.658518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1070:3060] TestWaitNotification: OK eventTxId 112 |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TxUsage::ReadRuleGeneration |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.6%| [TA] $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:28.268243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:28.268367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:28.268433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:28.268485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:28.268523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:28.268556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:28.268616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:28.268709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:28.269632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:28.270850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:28.369953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:28.370013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:28.386272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:28.386372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:28.386569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:28.395103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:28.395362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:28.396147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't 
been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:28.396406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:28.399478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:28.400182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:28.411263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:28.411367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:28.411458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:28.411503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:28.411683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:28.412526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:28.571553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.572759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.572898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573110Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:28.573969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... l shard schema changes has been received, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.829278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710757:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:23:30.829326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710757:0 129 -> 240 2026-01-10T12:23:30.831438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.832623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.832806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.832860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-10T12:23:30.832965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-10T12:23:30.833017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-10T12:23:30.833081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-10T12:23:30.833114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-10T12:23:30.833155Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-10T12:23:30.833229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1385:3205] message: TxId: 281474976710757 2026-01-10T12:23:30.833285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-10T12:23:30.833332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-10T12:23:30.833369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-10T12:23:30.833503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:23:30.833563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-10T12:23:30.833593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-10T12:23:30.833634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:23:30.833664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-01-10T12:23:30.833683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-10T12:23:30.833752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:23:30.834300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:30.834353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:23:30.834429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:23:30.834489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:23:30.834533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:23:30.836798Z node 1 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-10T12:23:30.836919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:32.876942Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:23:32.877201Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 262us result status StatusPathDoesNotExist 2026-01-10T12:23:32.877391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:23:32.877833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:23:32.878035Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 181us result status StatusPathDoesNotExist 2026-01-10T12:23:32.878144Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:23:32.878626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:23:32.878783Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 170us result status StatusSuccess 2026-01-10T12:23:32.879112Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:30.567604Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:30.567713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:30.567811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:30.567867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:30.567901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:30.567996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.568109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:30.569043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:30.577629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:30.668953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:30.669019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:30.685507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:30.685624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:30.685855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:30.694633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:30.694883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:30.695588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:30.695844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:30.698103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.698267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-01-10T12:23:30.699993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:30.700201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:30.700320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:30.701143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.868886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.873988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.874075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.874184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.874270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.874352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.874453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 65: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2026-01-10T12:23:33.467511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:23:33.468255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Add transaction: 175 at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-10T12:23:33.468617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:33.468727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:33.468777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-10T12:23:33.468829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:33.468871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-10T12:23:33.468999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 130 2026-01-10T12:23:33.469204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:33.469268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 62] was 2 2026-01-10T12:23:33.469752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2026-01-10T12:23:33.474912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:33.474961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:33.475125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-10T12:23:33.475246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:33.475283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-10T12:23:33.475325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-10T12:23:33.475431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-10T12:23:33.475475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2026-01-10T12:23:33.475562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:33.475597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:33.475635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:33.475668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:33.475700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2026-01-10T12:23:33.475736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:33.475768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-10T12:23:33.475813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-10T12:23:33.475889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-10T12:23:33.475927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2026-01-10T12:23:33.475958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 106 2026-01-10T12:23:33.475987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 62], 18446744073709551615 2026-01-10T12:23:33.477091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:33.477189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:33.477228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:33.477264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-10T12:23:33.477300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:33.478752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:33.478837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:33.478867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:33.478899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-10T12:23:33.478932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-10T12:23:33.479020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2026-01-10T12:23:33.479310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:33.479363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:33.479471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 
62] was 1 2026-01-10T12:23:33.479944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:33.479987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:33.480051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:33.482072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:33.484188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:33.484301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:33.484383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-10T12:23:33.485863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-10T12:23:33.485911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-10T12:23:33.487318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-10T12:23:33.487421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-10T12:23:33.487457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2882:4871] TestWaitNotification: OK eventTxId 175 |93.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSyncNeighborsTests::SerDes3 [GOOD] >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncNeighborsTests::SerDes2 [GOOD] >> TSyncBrokerTests::ShouldReleaseToken [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2026-01-10T12:23:34.763948Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.844303Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.844455Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 0 waiting# 0 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2026-01-10T12:23:34.745016Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.745702Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:51: TEvQuerySyncToken, token sent (1): VDisk actor id# [0:1:1] actor id# [1:6:2053] active# 1 waiting# 0 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2026-01-10T12:23:34.745049Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.745655Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-10T12:23:34.745712Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:111: ProcessQueue(), token sent: VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 0 waiting# 1 
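Note on the TSyncBrokerTests traces above: they exercise a broker that limits concurrent sync sessions. A TEvQuerySyncToken either receives a token immediately (the active# counter grows) or is enqueued (waiting# grows), and a TEvReleaseSyncToken hands the freed slot to the head of the queue via ProcessQueue(). The following is a minimal standalone sketch of that accounting only, not the actual NKikimr syncer broker; the class name TTokenBroker, the capacity of 1 and the std::deque-based queue are assumptions made purely for illustration.

#include <cstdio>
#include <deque>
#include <string>

// Sketch of a sync-token broker: at most `Capacity` holders at a time,
// later requesters wait in FIFO order until somebody releases a token.
class TTokenBroker {
public:
    explicit TTokenBroker(size_t capacity) : Capacity(capacity) {}

    // Returns true if the token is granted immediately, false if enqueued.
    bool Query(const std::string& vdisk) {
        if (Active < Capacity) {
            ++Active;
            std::printf("token sent: %s active# %zu waiting# %zu\n",
                        vdisk.c_str(), Active, Waiting.size());
            return true;
        }
        Waiting.push_back(vdisk);
        std::printf("enqueued: %s active# %zu waiting# %zu\n",
                    vdisk.c_str(), Active, Waiting.size());
        return false;
    }

    // Frees one slot; if somebody is waiting, the slot is handed straight to
    // the head of the queue, so Active stays unchanged and only waiting drops.
    void Release() {
        if (!Waiting.empty()) {
            const std::string next = Waiting.front();
            Waiting.pop_front();
            std::printf("ProcessQueue(), token sent: %s active# %zu waiting# %zu\n",
                        next.c_str(), Active, Waiting.size());
            return;
        }
        if (Active > 0) {
            --Active;
        }
        std::printf("token released: active# %zu waiting# %zu\n",
                    Active, Waiting.size());
    }

private:
    const size_t Capacity;
    size_t Active = 0;
    std::deque<std::string> Waiting;
};

int main() {
    TTokenBroker broker(/*capacity=*/1);  // capacity chosen for illustration
    broker.Query("[0:1:1]");   // token sent, active# 1 waiting# 0
    broker.Query("[0:1:2]");   // enqueued,  active# 1 waiting# 1
    broker.Release();          // queued requester takes over the freed slot
    broker.Release();          // no waiters left, active drops to 0
}

The sketch mirrors only the counter behaviour visible in the traces (ShouldEnqueue, ShouldProcessAfterRelease, ShouldReleaseInQueue); the real broker additionally keys requests by VDisk actor id and removes queued entries on release, which is omitted here.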
2026-01-10T12:23:34.745745Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:122: ProcessQueue() done: active# 1 waiting# 0 2026-01-10T12:23:34.745805Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.859294Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-10T12:23:34.859458Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-01-10T12:23:34.859532Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:161: TEvReleaseSyncToken, removed from queue: VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 0 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 11883256386162062696 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-10T12:23:33.440995Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-10T12:23:33.445740Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-10T12:23:33.450777Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-10T12:23:33.453073Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 
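Note on the ReadOnlyVDisk traces around this point: the group under test is erasure block-4-2, i.e. 8 VDisks with 6 parts per blob (4 data + 2 parity), and each part needs its own writable disk. That is why TEvPut still returns OK while one or two disks are in read-only mode, but degrades to EBS_DISINTEGRATED once a third disk is frozen. The snippet below is only a back-of-the-envelope check of that counting argument, under the assumed 4+2 layout; the real dsproxy strategy also tracks per-part "Situations" and handoff replicas, which are not modelled here.

#include <bitset>
#include <cstdio>

// Erasure block-4-2 over 8 VDisks: every blob is split into 6 parts
// (4 data + 2 parity) and each part must land on a distinct VDisk.
constexpr int GroupSize = 8;
constexpr int PartsPerBlob = 6;   // 4 data + 2 parity

// A put can succeed only if at least PartsPerBlob disks accept writes;
// read-only disks answer "VDisk is in read-only mode".
bool CanWrite(const std::bitset<GroupSize>& readOnly) {
    const int writable = GroupSize - static_cast<int>(readOnly.count());
    return writable >= PartsPerBlob;
}

int main() {
    std::bitset<GroupSize> readOnly;      // no disks frozen yet
    std::printf("0 read-only -> put %s\n", CanWrite(readOnly) ? "OK" : "ERROR");

    readOnly.set(0);                      // position 0 frozen, as in the test
    std::printf("1 read-only -> put %s\n", CanWrite(readOnly) ? "OK" : "ERROR");

    readOnly.set(1);
    readOnly.set(2);                      // three frozen disks
    std::printf("3 read-only -> put %s\n", CanWrite(readOnly) ? "OK" : "ERROR");
    // With 3 of 8 disks read-only only 5 remain writable, fewer than the
    // 6 distinct placements a 4+2 blob needs -> EBS_DISINTEGRATED.
}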
2026-01-10T12:23:33.459194Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-10T12:23:33.461421Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-01-10T12:23:33.463630Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-10T12:23:33.465659Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-10T12:23:34.922158Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.922245Z 3 00h05m30.160512s :BS_SKELETON ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:34.922322Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.922831Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [ab8599bddf624d8e] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-10T12:23:34.926117Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.314 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.315 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.315 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.315 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.315 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.315 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 0.907 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 0.936 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.027 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.045 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.063 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { 
OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-10T12:23:34.927492Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.927802Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.928608Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-10T12:23:34.929766Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.930252Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.931013Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { Order ... 
OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2026-01-10T12:23:34.937692Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.937836Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:34.937875Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2026-01-10T12:23:34.939292Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.939424Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.939488Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2026-01-10T12:23:34.941090Z 1 
00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.941231Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:34.941283Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2026-01-10T12:23:34.942866Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.942942Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:34.943012Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2026-01-10T12:23:34.946603Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:34.946727Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:34.946765Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:34.947130Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [5df296e336730530] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 
32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2026-01-10T12:23:34.947350Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: {BPG72@dsproxy_get.cpp:427} Query history GroupId# 2181038080 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.321 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.322 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGet{ TimestampMs# 0.322 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGet{ TimestampMs# 0.322 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVGet{ TimestampMs# 0.323 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVGet{ TimestampMs# 0.323 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVGet{ TimestampMs# 0.323 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVGet{ TimestampMs# 0.323 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVGetResult{ TimestampMs# 1.8 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 2.211 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVGetResult{ TimestampMs# 2.236 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVGetResult{ TimestampMs# 2.253 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVGetResult{ TimestampMs# 2.271 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVGetResult{ TimestampMs# 2.465 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPut{ TimestampMs# 2.488 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 2.488 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 2.489 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGetResult{ TimestampMs# 2.906 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# OK } TEvVPut{ TimestampMs# 2.91 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGetResult{ TimestampMs# 2.934 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# OK } TEvVPut{ TimestampMs# 2.936 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.954 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } PutAcceleration{ TimestampMs# 2.957 } PutAcceleration{ TimestampMs# 2.972 } TEvVPutResult{ TimestampMs# 3.099 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPutResult{ 
TimestampMs# 3.238 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } 2026-01-10T12:23:34.947429Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:34.947468Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:31.968342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:31.968435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:31.968472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:31.968507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:31.968546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:31.968576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:31.968673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:31.968756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:31.969644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:31.969943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:32.061439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:32.061498Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:32.077401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:32.077499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:32.077725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:32.085754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:32.086004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:32.086691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:32.086901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:32.089443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:32.089565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:32.090556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:32.090602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:32.090647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:32.090684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:32.090768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:32.090965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:32.235712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.236726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.236861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.236979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:32.237849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-10T12:23:34.833343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:34.833452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:34.833496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-10T12:23:34.833566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:34.833600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-10T12:23:34.833630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 134 2026-01-10T12:23:34.834600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:34.834723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:34.836466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-10T12:23:34.836517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:23:34.836612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 134 -> 135 2026-01-10T12:23:34.836764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:34.836818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 FAKE_COORDINATOR: Erasing txId 175 2026-01-10T12:23:34.838994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:34.839037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:34.839194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 
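Note on the TTxPublishToSchemeBoard / TTxAckPublishToSchemeBoard exchanges in these traces: txId 175 publishes two path descriptions (path 1 at version 106 and path 62 at version 18446744073709551615, i.e. uint64 max, the marker used for a dropped path) and then counts TEvUpdateAck responses down ("Publication in-flight, count: 2", then 1) until "Publication complete, notify & remove". The code below is a self-contained sketch of that ack bookkeeping only; the type and function names (TPublicationTracker, StartPublication, Ack) are invented for illustration and are not schemeshard APIs.

#include <cstdint>
#include <cstdio>
#include <map>

// Illustrative ack tracker: for each transaction remember which
// (pathId, version) pairs still await a TEvUpdateAck from the populator.
using TPathId = uint64_t;
using TVersion = uint64_t;

// Version used in the log for a path that has been dropped.
constexpr TVersion DroppedVersion = UINT64_MAX;  // 18446744073709551615

class TPublicationTracker {
public:
    void StartPublication(uint64_t txId, TPathId pathId, TVersion version) {
        InFlight[txId][pathId] = version;
    }

    // Returns true when this ack completes the whole publication for txId,
    // i.e. the moment the schemeshard would log "Publication complete".
    bool Ack(uint64_t txId, TPathId pathId, TVersion ackedVersion) {
        auto tx = InFlight.find(txId);
        if (tx == InFlight.end()) {
            return false;
        }
        auto path = tx->second.find(pathId);
        // Only acks that reach the requested version retire the entry.
        if (path != tx->second.end() && ackedVersion >= path->second) {
            tx->second.erase(path);
        }
        std::printf("Publication in-flight, count: %zu, txId: %llu\n",
                    tx->second.size(), static_cast<unsigned long long>(txId));
        if (!tx->second.empty()) {
            return false;
        }
        InFlight.erase(tx);
        std::printf("Publication complete, txId: %llu\n",
                    static_cast<unsigned long long>(txId));
        return true;
    }

private:
    std::map<uint64_t, std::map<TPathId, TVersion>> InFlight;
};

int main() {
    TPublicationTracker tracker;
    tracker.StartPublication(175, /*pathId=*/1, /*version=*/106);
    tracker.StartPublication(175, /*pathId=*/62, DroppedVersion);

    tracker.Ack(175, 1, 106);              // in-flight count drops to 1
    tracker.Ack(175, 62, DroppedVersion);  // count reaches 0 -> complete
}

Unlike the real transaction, this sketch does not persist anything or notify subscribers; it only reproduces the countdown that the DecrementPathDbRefCount / AckPublish lines trace.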
2026-01-10T12:23:34.839306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:34.839351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-10T12:23:34.839398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-10T12:23:34.839694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-10T12:23:34.839733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 175:0 ProgressState 2026-01-10T12:23:34.839775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 135 -> 240 2026-01-10T12:23:34.840505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:34.840583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:34.840609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:34.840638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-10T12:23:34.840690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:34.841366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:34.841441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:34.841469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:34.841494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-10T12:23:34.841518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-10T12:23:34.841576Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-10T12:23:34.843377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-10T12:23:34.843415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-10T12:23:34.843465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:34.843482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:34.843502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:34.843528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:34.843547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-10T12:23:34.843566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:34.843586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-10T12:23:34.843603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-10T12:23:34.843642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-10T12:23:34.844278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:34.844310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:34.844346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-10T12:23:34.844544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:34.844585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:34.844628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:34.845054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:34.845116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:34.846611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:34.846670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-10T12:23:34.847510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-10T12:23:34.847538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-10T12:23:34.848377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-10T12:23:34.848448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-10T12:23:34.848467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:3045:5033] TestWaitNotification: OK eventTxId 175 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> BsControllerConfig::PDiskCreate >> BsControllerConfig::SelectAllGroups >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::AddDriveSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2026-01-10T12:23:35.662721Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-10T12:23:35.662839Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-10T12:23:35.752532Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-10T12:23:35.752669Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-01-10T12:23:35.752741Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:82: TEvQuerySyncToken, enqueued (1): VDisk actor id# [0:1:2] actor id# [2:7:2054] active# 1 waiting# 1 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::DeleteStoragePool >> BsControllerConfig::SoleCommandRollback >> BsControllerConfig::MergeIntersectingBoxes |93.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BsControllerConfig::SoleCommandRollback [GOOD] >> BsControllerConfig::UnsupportedCommandError >> ReadOnlyVDisk::TestWrites [GOOD] >> ReadOnlyVDisk::TestDiscover [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> BsControllerConfig::UnsupportedCommandError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 12390139686386055692 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-10T12:23:33.726553Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-10T12:23:34.072013Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2026-01-10T12:23:34.073324Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-10T12:23:34.366797Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2026-01-10T12:23:34.368022Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2026-01-10T12:23:34.368809Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2026-01-10T12:23:34.369128Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [43daf1cf86606f87] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 
0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-10T12:23:34.370232Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.5 sample PartId# [1:1:5:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.501 sample PartId# [1:1:5:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.501 sample PartId# [1:1:5:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.501 sample PartId# [1:1:5:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.502 sample PartId# [1:1:5:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.502 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 1.753 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.815 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.603 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.653 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 2.897 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.918 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.933 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.951 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.981 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 3.16 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 3413953147036026759 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-10T12:23:33.721531Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-10T12:23:33.727919Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-10T12:23:33.733849Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-10T12:23:33.737606Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-01-10T12:23:33.747265Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-10T12:23:33.750550Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-01-10T12:23:33.753933Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-10T12:23:33.757045Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 
RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-10T12:23:34.846372Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.846468Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:34.846568Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.847329Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [3363b74fb5a38336] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-10T12:23:34.848701Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.393 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.393 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.393 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.394 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.394 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.394 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.189 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.233 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.353 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.381 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.41 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" 
} ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-10T12:23:34.850305Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.850422Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.851298Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-10T12:23:34.852809Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5323:704] 2026-01-10T12:23:34.853472Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:34.854096Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNu ... 
ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2026-01-10T12:23:36.770806Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.770934Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2026-01-10T12:23:36.773796Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:36.774946Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2026-01-10T12:23:36.778606Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2026-01-10T12:23:36.780940Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.781013Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2026-01-10T12:23:36.783174Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.783258Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2026-01-10T12:23:36.785730Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.785803Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2026-01-10T12:23:36.788212Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] 2026-01-10T12:23:36.788436Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2026-01-10T12:23:36.790847Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.790938Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2026-01-10T12:23:36.793127Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5337:718] 2026-01-10T12:23:36.793261Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5330:711] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::UnsupportedCommandError [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:29:2057] recipient: [1:23:2068] 2026-01-10T12:23:36.353109Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.364044Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.367753Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.368282Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.371045Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.371831Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371861Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.372005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.381760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.381966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.384139Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.384299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384462Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:49:2057] recipient: [1:11:2058] 2026-01-10T12:23:36.400230Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.400355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.422687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.422844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.422934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.423019Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.423160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.423261Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.423318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.423387Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.434219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.434378Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.445188Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.445480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:36.449798Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:36.449866Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:36.450097Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection 
service started 2026-01-10T12:23:36.452187Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:36.476357Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: CYAN } } } Status { Success: true } Success: true 2026-01-10T12:23:36.488508Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: YELLOW } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-10T12:23:36.510916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } Status { Success: true BaseConfig { Node { NodeId: 1 PhysicalLocation: "\n\001\000\000\000\024\001\000\000\000\036\001\000\000\000(\001\000\000\000" HostKey { Fqdn: "::1" IcPort: 12001 } Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Settings { DefaultMaxSlots: 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:29:2057] recipient: [2:23:2068] 2026-01-10T12:23:36.639727Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.640813Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.641107Z node 2 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.641672Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.642998Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.643209Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.643235Z node 2 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.643439Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.653522Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.653687Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.653839Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.653959Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.654072Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.654179Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:49:2057] recipient: [2:11:2058] 2026-01-10T12:23:36.665510Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.665751Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.686990Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.687178Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.687295Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.687392Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.687515Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.687570Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.687643Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.687701Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.698399Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.698532Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.709275Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEna ... 
16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:29:2057] recipient: [3:23:2068] 2026-01-10T12:23:36.932125Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.933109Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.933360Z node 3 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.934759Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.935116Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.935393Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.935429Z node 3 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.935610Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.945055Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.945189Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.945303Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.945410Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.945499Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.945590Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:49:2057] recipient: [3:11:2058] 2026-01-10T12:23:36.956899Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.957054Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.978305Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.978438Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.978539Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.978609Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.978716Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.978779Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.978832Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.978891Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.989615Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.989759Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.000535Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.000687Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:37.002119Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:37.002182Z node 3 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:37.002390Z node 3 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:37.002435Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:37.003038Z node 3 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:29:2057] recipient: [4:24:2068] 2026-01-10T12:23:37.190765Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:37.191428Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:37.191608Z node 4 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:37.192476Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2026-01-10T12:23:37.192822Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:37.192891Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:37.192909Z node 4 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:37.193030Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:37.199692Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:37.199787Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:37.199864Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:37.199953Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:37.200020Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:37.200108Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:49:2057] recipient: [4:11:2058] 2026-01-10T12:23:37.211090Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:37.211197Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.232183Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.232269Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.232335Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.232401Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.232486Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.232535Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.232602Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.232636Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.243136Z node 4 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.243225Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.253761Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.253867Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:37.254786Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:37.254830Z node 4 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:37.254970Z node 4 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:37.254999Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:37.255553Z node 4 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::Basic >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table [GOOD] >> BsControllerConfig::SelectAllGroups [GOOD] >> BsControllerConfig::SoleCommandErrorWhenCombined >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] Test command err: 2026-01-10T12:23:36.353364Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.364099Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.367748Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.370566Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.370972Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.371211Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371240Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.382079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.382177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.384153Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.384315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384415Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.619957Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.166933s 2026-01-10T12:23:36.620140Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.167161s Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:23:2068] Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:29:2057] recipient: [11:23:2068] 2026-01-10T12:23:38.545546Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:38.546296Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:38.546476Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:38.547561Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:38.547763Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:38.547924Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.547951Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.548121Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:38.555919Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:38.556024Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:38.556148Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:38.556241Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.556304Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.556354Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:49:2057] recipient: [11:11:2058] 2026-01-10T12:23:38.567466Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:38.567593Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.588951Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.589112Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.589261Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.589357Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.589474Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.589524Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.589559Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.589609Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.600368Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.600541Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.611309Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.611473Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:38.612816Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:38.612880Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:38.613029Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:38.613068Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:38.613611Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:29:2057] recipient: [12:23:2068] 2026-01-10T12:23:38.836766Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:38.837788Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event 
Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:38.838066Z node 12 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:38.839381Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:38.839868Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:38.840184Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.840219Z node 12 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.840422Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:38.850652Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:38.850807Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:38.850935Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:38.851067Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.851160Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.851228Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:49:2057] recipient: [12:11:2058] 2026-01-10T12:23:38.862744Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:38.862925Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.884381Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.884501Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.884568Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.884634Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.884728Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.884775Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.884800Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.884831Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.895487Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.895610Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.906212Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.906320Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:38.907386Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:38.907427Z node 12 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:38.907603Z node 12 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:38.907653Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:38.908289Z node 12 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> BsControllerConfig::PDiskCreate [GOOD] >> BsControllerConfig::ReassignGroupDisk >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs >> ReadOnlyVDisk::TestReads [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TExportToS3Tests::IndexMaterializationDisabled |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:30.567602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:30.567684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:30.567751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:30.567809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:30.567851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:30.567924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:30.567994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:30.568751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:30.577660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:30.674180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:30.674255Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:30.690702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:30.690848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:30.691078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:30.698955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:30.699194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:30.699815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:30.700050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:30.701957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.702133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:30.703044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:30.703088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:30.703134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:30.703180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:30.703283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:30.703464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:30.843462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.844941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.845002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.845080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:30.845168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 375534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 175:0 ProgressState 2026-01-10T12:23:39.375572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 135 -> 240 2026-01-10T12:23:39.376361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:39.376451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:39.376477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:39.376502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-10T12:23:39.376529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:39.377133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:39.377189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-10T12:23:39.377207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-10T12:23:39.377229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-10T12:23:39.377250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 6 2026-01-10T12:23:39.377301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-10T12:23:39.378926Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-10T12:23:39.379012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:23:39.379046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:23:39.379075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:23:39.379267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-10T12:23:39.379299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-10T12:23:39.379362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:39.379384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:39.379409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-10T12:23:39.379428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:39.379452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-10T12:23:39.379476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-10T12:23:39.379503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-10T12:23:39.379522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-10T12:23:39.379648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 5 2026-01-10T12:23:39.380469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:39.381235Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 Forgetting tablet 72075186233409619 2026-01-10T12:23:39.382044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2026-01-10T12:23:39.382267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 4 2026-01-10T12:23:39.382505Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 2026-01-10T12:23:39.383624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:39.403894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2026-01-10T12:23:39.404177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-10T12:23:39.404790Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 Forgetting tablet 72075186233409618 2026-01-10T12:23:39.406347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2026-01-10T12:23:39.406535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 Forgetting tablet 72075186233409620 2026-01-10T12:23:39.407072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-10T12:23:39.407628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:39.407665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:39.407779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-10T12:23:39.408748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:39.408793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-10T12:23:39.408845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:23:39.411180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:74 2026-01-10T12:23:39.411216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2026-01-10T12:23:39.411273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:73 2026-01-10T12:23:39.411291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2026-01-10T12:23:39.411438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:75 2026-01-10T12:23:39.411479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2026-01-10T12:23:39.412916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:39.412996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-10T12:23:39.414262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-10T12:23:39.414295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-10T12:23:39.415305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-10T12:23:39.415381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-10T12:23:39.415406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:8292:9320] TestWaitNotification: OK eventTxId 175 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::RebootDuringCompletion |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 15244600506223278829 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 
0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-10T12:23:33.548505Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-10T12:23:33.552566Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-10T12:23:34.328251Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:34.329048Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-10T12:23:34.783936Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:34.784182Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-10T12:23:35.100543Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:35.101356Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:35.102061Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:35.102270Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [90e2387d56eab2a7] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { 
OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-10T12:23:35.103543Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.451 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.452 sample PartId# [1:1:4:0:0:131072:5] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.452 sample PartId# [1:1:4:0:0:131072:4] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.452 sample PartId# [1:1:4:0:0:131072:3] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.453 sample PartId# [1:1:4:0:0:131072:2] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.453 sample PartId# [1:1:4:0:0:131072:1] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 0.923 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 0.952 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 1.871 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.903 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.109 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.122 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.13 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.141 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.159 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 2.251 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2026-01-10T12:23:35.444736Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:35.444889Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 
2026-01-10T12:23:35.444933Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-01-10T12:23:36.161832Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:36.162071Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:36.162139Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:36.162194Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-10T12:23:36.447476Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:36.447676Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:36.447720Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:36.447769Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:36.447806Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-10T12:23:36.719782Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:36.720011Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:36.720093Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:36.720149Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:36.720202Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:36.720256Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-10T12:23:36.905422Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2026-01-10T12:23:36.905576Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:36.905618Z 3 
00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:36.905657Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:36.905696Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:36.905734Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:36.905772Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-10T12:23:37.108832Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2026-01-10T12:23:37.108902Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:37.108939Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:37.108974Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:37.109013Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:37.109059Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2026-01-10T12:23:37.333928Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2026-01-10T12:23:37.334001Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:37.334038Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:37.334076Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:37.334110Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2026-01-10T12:23:37.578872Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:726] 2026-01-10T12:23:37.578945Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:37.579008Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:37.579049Z 7 00h11m40.410512s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-01-10T12:23:37.957071Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:733] 2026-01-10T12:23:37.957138Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:37.957175Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-10T12:23:38.772235Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:740] 2026-01-10T12:23:38.772302Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-10T12:23:39.122186Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 8486423273704142117 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 
1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK 
Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TxUsage::WriteToTopic_Demo_40_Table [GOOD] |93.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeColumnshard::AnalyzeStatus [GOOD] |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> TExportToS3Tests::IndexMaterializationDisabled [GOOD] >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::CommandRollbackWhenAlone >> BasicUsage::ConflictingWrites [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::IndexMaterialization >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> RemoteTopicReader::ReadTopic >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> RemoteTopicReader::PassAwayOnCreatingReadSession >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TxUsage::Write_And_Read_Small_Messages_1 [GOOD] >> TExportToS3Tests::RebootDuringAbortion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:27.928475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:28.042656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:28.050441Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:28.050845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:28.050929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:28.470449Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:28.576252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.576398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.610819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.683365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:29.357659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:29.359085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:29.359135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:29.359175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:29.359335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:29.425516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:29.963840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:33.252584Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:33.259666Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:33.297252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:33.297405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:21:33.336626Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:33.338650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:33.510895Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.511519Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.512009Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.512689Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.512966Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.513055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.513159Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.513313Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.513496Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.700620Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:33.734460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:33.734578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:33.747792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:33.909127Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:33.953753Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:33.953892Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:33.988480Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:33.990211Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:33.990523Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:33.990603Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:33.990669Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:33.990729Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:33.990783Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: 
[72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:33.990845Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:33.991598Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:34.191331Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.191448Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2271:2643], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:34.207031Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2298:2667] 2026-01-10T12:21:34.207904Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2298:2667], schemeshard id = 72075186224037897 2026-01-10T12:21:34.353434Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2428:2755] 2026-01-10T12:21:34.353840Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:34.356416Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Describe result: PathErrorUnknown 2026-01-10T12:21:34.356486Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Creating table 2026-01-10T12:21:34.356590Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:34.360195Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2447:2762], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:34.363632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:34.371800Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:34.371951Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:34.386988Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:34.457132Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:34.859661Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Request: create. Transaction completed: 281474976720657. 
Doublechecking... 2026-01-10T12:21:34.949121Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:34.949220Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2430:2757] Owner: [2:2429:2756]. Column diff is empty, finishing 2026-01-10T12:21:35.439850Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:35.673988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:3385], Data ... :1273: SyncNode(), pipe client id = [2:4333:3820] 2026-01-10T12:22:02.398655Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4334:3821] 2026-01-10T12:22:02.398771Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4334:3821], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:02.398816Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:02.398893Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4333:3820], server id = [2:4334:3821], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:02.398974Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:02.399037Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4331:3818], StatRequests.size() = 1 2026-01-10T12:22:02.399195Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:22:54.842521Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpfdm74dwvfnr80qg4wy9", SessionId: ydb://session/3?node_id=2&id=OGU5NjVhZmMtYzRkZWJhN2MtZTBkM2MyYTMtOTVkMDQyNmQ=, Slow query, duration: 52.929429s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:22:54.843984Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4196:3743], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:22:54.844202Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4196:3743], Start read next stream part 2026-01-10T12:22:54.844630Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-10T12:22:54.844739Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:54.847547Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:22:54.847661Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], Start read next stream part 2026-01-10T12:22:54.847948Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4196:3743], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:22:54.847990Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4196:3743], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTdlMTM0NDgtZTkwNGUxNWYtNGY4NGQ2MTktM2JkYTQxYmY=, TxId: 2026-01-10T12:22:54.953340Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4468:3941]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:54.953682Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:54.953738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4468:3941], StatRequests.size() = 1 2026-01-10T12:23:40.482973Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxr33z5yqg9xs56n7bz66v", SessionId: ydb://session/3?node_id=2&id=MTZmNWVmZjEtNTI1YWQyYWYtZDU4MzQyMmMtMWQ3NTY1N2Q=, Slow query, duration: 45.631133s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:40.484763Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:40.484866Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], Start read next stream part 2026-01-10T12:23:40.485177Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33200, txId: 18446744073709551615] shutting down 2026-01-10T12:23:40.485454Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4556:4024], ActorId: [2:4557:4025], Starting query actor #1 [2:4558:4026] 2026-01-10T12:23:40.485506Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4557:4025], ActorId: [2:4558:4026], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:23:40.488977Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4557:4025], ActorId: [2:4558:4026], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzVmMjIyMmQtZjg2YzdhMjYtMjcwN2I3YjEtYmQ0YWE5ZTY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:23:40.489704Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:40.489753Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4186:2470], ActorId: [2:4444:3921], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmIyOTc2MDMtODQzNzdkNjctMjQxZDM0NjAtYTMyM2MwZDU=, TxId: 2026-01-10T12:23:40.550318Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4575:4040]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:40.550608Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:40.550649Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4575:4040], StatRequests.size() = 1 2026-01-10T12:23:40.683954Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4557:4025], ActorId: [2:4558:4026], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzVmMjIyMmQtZjg2YzdhMjYtMjcwN2I3YjEtYmQ0YWE5ZTY=, TxId: 2026-01-10T12:23:40.684033Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4557:4025], ActorId: [2:4558:4026], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzVmMjIyMmQtZjg2YzdhMjYtMjcwN2I3YjEtYmQ0YWE5ZTY=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:23:40.685798Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4591:4048] 2026-01-10T12:23:40.686266Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_IN_PROGRESS 2026-01-10T12:23:40.687226Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4593:4049]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 2
FastCheckInFlight: 1
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 0
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 33], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 34], [OwnerId: 72075186224037897, LocalPathId: 33]
ForceTraversals: 1
    1970-01-01T00:00:04Z
NavigatePathId: 
ForceTraversalOperationId: operationId
  CreatedAt: 1970-01-01T00:00:04.040012Z
, ReplyToActorId: [1:1263:3036]
, Types: 1
, Tables size: 1
, Tables: 
    Table[0] PathId: [OwnerId: 72075186224037897, LocalPathId: 34]
        Status: AnalyzeStarted
        AnalyzedShards size: 0
        ColumnTags: 1,2
TraversalStartTime: 2026-01-10T12:22:01Z
TraversalDatabase: 
TraversalPathId: [OwnerId: 72075186224037897, LocalPathId: 34]
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
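(Hedged illustration, not part of the captured test output.) The aggregator dump above references the force traversal for [OwnerId: 72075186224037897, LocalPathId: 34], whose column statistics the UPSERT shown earlier persisted into `.metadata/_statistics` under /Root/Database. A minimal YQL sketch for inspecting those rows, assuming only the column layout visible in that UPSERT (owner_id, local_path_id, stat_type, column_tag, data):

    SELECT owner_id, local_path_id, stat_type, column_tag, data
    FROM `.metadata/_statistics`          -- relative to /Root/Database, as in the UPSERT above
    WHERE local_path_id = 34;             -- LocalPathId of the analyzed table, per the dump above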
... unblocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... 2026-01-10T12:23:40.688255Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4556:4024], ActorId: [2:4557:4025], Got response [2:4558:4026] SUCCESS 2026-01-10T12:23:40.688460Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:40.712986Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:23:40.713055Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:1263:3036] 2026-01-10T12:23:40.714203Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4597:4052] 2026-01-10T12:23:40.714707Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2026-01-10T12:23:38.270243Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:38.271242Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:38.271508Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:38.272966Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:38.273300Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:38.273567Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.273587Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.273760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:38.281554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:38.281635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:38.281779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:38.281854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.281913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.281960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-10T12:23:38.293053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:38.293184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.316758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.316895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.316995Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.317084Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.317186Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.317229Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.317266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.317315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.328044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.328189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.338949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.339096Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:38.340462Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:38.340524Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:38.340764Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:38.340811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:38.354470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: 
[11:206:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:206:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:206:2077] 2026-01-10T12:23:40.333697Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:40.334317Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:40.334523Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:40.335778Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:40.336233Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:40.336449Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:40.336475Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:40.336675Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:40.345520Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:40.345644Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:40.345748Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:40.345849Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:40.345964Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:40.346044Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-10T12:23:40.357278Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:40.357434Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:40.381043Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:40.381171Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:40.381245Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:40.381320Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:40.381440Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:40.381487Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:40.381520Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:40.381558Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:40.392247Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:40.392374Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:40.402997Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:40.403095Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:40.404180Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:40.404222Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:40.404369Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:40.404398Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:40.404856Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:29:2057] recipient: [21:24:2068] 2026-01-10T12:23:42.367307Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:42.368015Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:42.368209Z node 21 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:42.369000Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:42.369365Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:42.369463Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.369482Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.369647Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:42.376949Z node 21 :BS_CONTROLLER DEBUG: 
{BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:42.377045Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:42.377164Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:42.377256Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.377351Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.377417Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:49:2057] recipient: [21:11:2058] 2026-01-10T12:23:42.388546Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:42.388666Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.409808Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.409927Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.409987Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.410050Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.410144Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.410179Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.410229Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.410268Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.420777Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.420860Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.431478Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.431599Z node 21 
:BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:42.432611Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:42.432650Z node 21 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:42.432808Z node 21 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:42.432862Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:42.435563Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:29:2057] recipient: [22:24:2068] 2026-01-10T12:23:42.634566Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:42.635348Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:42.635536Z node 22 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:42.636690Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:42.636813Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:42.636886Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.636903Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.637028Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:42.646088Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:42.646205Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:42.646329Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:42.646439Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.646532Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.646593Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:49:2057] recipient: [22:11:2058] 2026-01-10T12:23:42.657993Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:42.658203Z node 22 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.679877Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.680015Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.680110Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.680211Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.680336Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.680396Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.680429Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.680470Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.691179Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.691317Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.702080Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.702223Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:42.703598Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:42.703669Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:42.703856Z node 22 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:42.703908Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:42.704697Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath >> 
TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::ConflictingWrites [GOOD] Test command err: 2026-01-10T12:20:49.984052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706831932146941:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:49.984146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.097156Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.608165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.633665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.633773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.745194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.931765Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.948357Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706831932146907:2081] 1768047649974410 != 1768047649974413 2026-01-10T12:20:50.977450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.004597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.105630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001a39/r3tmp/yandexsLZsD5.tmp 2026-01-10T12:20:51.105659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001a39/r3tmp/yandexsLZsD5.tmp 2026-01-10T12:20:51.105866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001a39/r3tmp/yandexsLZsD5.tmp 2026-01-10T12:20:51.105962Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.344484Z INFO: TTestServer started on Port 17614 GrpcPort 7291 PQClient connected to localhost:7291 2026-01-10T12:20:51.635276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.648273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:20:51.660896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.767141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.970026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-10T12:20:54.446812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853406984433:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.446930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.448515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853406984445:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.448572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853406984446:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.448766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.453641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.471427Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706853406984449:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-10T12:20:54.722854Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706853406984513:2645] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.776278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.819339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.910796Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706853406984521:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.913719Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2FkMDVkMDctNjE3Mzg0OTgtNzhiYjBiMjAtNTYzYTA3Mzk=, ActorId: [1:7593706853406984402:2326], ActorState: ExecuteState, LegacyTraceId: 01kekxmdgk8g6vywbd0bpzwe61, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.916816Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.944513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.984374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706831932146941:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:54.984464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593706857701952103:2824] === CheckClustersList. Ok 2026-01-10T12:21:01.596011Z :CreateTopicWithCustomName INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.623996Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.646801Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706883471756112:2937] connected; active server actors: 1 2026-01-10T12:21:01.647008Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic-1768047661] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-10T12:21:01.656404Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic-1768047661] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.656577Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic-1768047661] BALANCER INIT DONE for test-topic-1768047661: (0, 72075186224037892) 2026-01-10T12:21:01.662338Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.663206Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_1844674407370955 ... ime { nanos: 2000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:23:41.401111Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] OnAck: seqNo=96, txId=? 2026-01-10T12:23:41.401133Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: acknoledged message 96 2026-01-10T12:23:41.401341Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:23:41.401454Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 97 written { offset: 96 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 2000000 } max_queue_wait_time { nanos: 2000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:23:41.401495Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] OnAck: seqNo=97, txId=? 2026-01-10T12:23:41.401520Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: acknoledged message 97 2026-01-10T12:23:41.401692Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:23:41.401787Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 98 written { offset: 97 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 2000000 } max_queue_wait_time { nanos: 2000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:23:41.401811Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] OnAck: seqNo=98, txId=? 
2026-01-10T12:23:41.401836Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: acknoledged message 98 2026-01-10T12:23:41.401991Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:23:41.402081Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 99 written { offset: 98 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 2000000 } max_queue_wait_time { nanos: 2000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:23:41.402103Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] OnAck: seqNo=99, txId=? 2026-01-10T12:23:41.402126Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: acknoledged message 99 2026-01-10T12:23:41.402250Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-10T12:23:41.402353Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 100 written { offset: 99 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 2000000 } max_queue_wait_time { nanos: 2000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-10T12:23:41.402383Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] OnAck: seqNo=100, txId=? 
2026-01-10T12:23:41.402405Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: acknoledged message 100 2026-01-10T12:23:41.439013Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session will now close 2026-01-10T12:23:41.439105Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: aborting 2026-01-10T12:23:41.440372Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: gracefully shut down, all writes complete 2026-01-10T12:23:41.440446Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:23:41.440507Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:23:41.440577Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session is aborting and will not restart 2026-01-10T12:23:41.440601Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 13 sessionId: test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0 grpc read done: success: 0 data: 2026-01-10T12:23:41.440638Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 13 sessionId: test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0 grpc read failed 2026-01-10T12:23:41.441104Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 13 sessionId: test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0 2026-01-10T12:23:41.441135Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 13 sessionId: test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0 is DEAD 2026-01-10T12:23:41.441579Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:23:41.441816Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7593707568346236373:2677] destroyed 2026-01-10T12:23:41.441877Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
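The trace above is one flattened stream of records, each beginning with an ISO-8601 timestamp, an optional "node N" tag, a component such as PERSQUEUE or KQP_WORKLOAD_SERVICE, a severity, a source location, and the message. A quick way to triage output of this size is to count WARN and ERROR records per component before reading the full trace. The sketch below (Python) is illustrative only and is not part of the captured output; the regular expression is an assumption based solely on the record shape visible above, and it deliberately ignores SDK-side records (the ":DEBUG: [/Root] ..." lines) that carry no node id.

```python
import re
import sys
from collections import Counter

# Assumed record shape, taken from the trace above:
#   "2026-01-10T12:20:49.984052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: ..."
RECORD = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+"
    r"node\s+\d+\s+:(?P<component>[A-Z_]+)\s+(?P<severity>[A-Z]+):"
)

def summarize(text: str) -> Counter:
    """Count (component, severity) pairs for WARN and ERROR records."""
    counts = Counter()
    for m in RECORD.finditer(text):
        if m.group("severity") in ("WARN", "ERROR"):
            counts[(m.group("component"), m.group("severity"))] += 1
    return counts

if __name__ == "__main__":
    for (component, severity), n in summarize(sys.stdin.read()).most_common():
        print(f"{component:<24} {severity:<6} {n}")
```

Fed a block like the one above, it reports which components produced WARN or ERROR records (here, for example, KQP_WORKLOAD_SERVICE and TX_PROXY), which narrows down where to start reading.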
2026-01-10T12:23:41.441930Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:41.441966Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.441996Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:41.442034Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.442058Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:41.444802Z node 14 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-10T12:23:41.445049Z node 14 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path test-topic 2026-01-10T12:23:41.446323Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [14:7593707572641203722:2695], now have 1 active actors on pipe 2026-01-10T12:23:41.446367Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][test-topic] pipe [14:7593707572641203723:2696] connected; active server actors: 1 2026-01-10T12:23:41.446752Z node 14 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2026-01-10T12:23:41.447336Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7593707572641203722:2695] destroyed 2026-01-10T12:23:41.449406Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d6d940ee-2f0e04cb-d9da4faa-77ba517d_0] PartitionId [0] Generation [11] Write session: destroy 2026-01-10T12:23:41.490898Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:41.490952Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.490981Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:41.491016Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.491047Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:41.591247Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:41.591299Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.591328Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:41.591364Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.591396Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:41.691577Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:41.691636Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.691673Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:41.691710Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:41.691741Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |93.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TExportToS3Tests::IndexMaterialization [GOOD] >> TxUsage::WriteToTopic_Demo_40_Query >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::IndexMaterializationGlobal >> TxUsage::Write_And_Read_Small_Messages_2 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2026-01-10T12:22:28.874376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:28.875568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:28.875630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:28.886889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:28.887262Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:28.887550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:28.936618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:28.945605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:28.946255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:28.948038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:28.948139Z node 1 :TX_DATASHARD 
DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:28.948193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:28.948638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:28.948735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:28.948818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:29.030955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:29.067162Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:29.067361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:29.067467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:29.067531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:29.067576Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:29.067617Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:29.067880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.067941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.068307Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:29.068435Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:29.068506Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:29.068566Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:29.068605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:29.068648Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:29.068705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:29.068748Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:29.068794Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:29.068899Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.068941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.069037Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:29.082297Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:29.082389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:29.082506Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:29.082733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:29.082829Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:29.082900Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:29.082961Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:29.083009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:29.083052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:29.083089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:29.083439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:29.083496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:29.083549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:29.083586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:29.083657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:29.083693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:29.083734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:29.083767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:29.083795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:29.096159Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:29.096236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit 
StoreSchemeTx 2026-01-10T12:22:29.096277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:29.096362Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:29.096465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:29.097130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.097223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:29.097281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:29.097365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:29.097403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:29.097530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:29.097575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:29.097615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:29.097651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:29.101737Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:29.101814Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:29.102049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.102095Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:29.102152Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:29.102200Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:29.102239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:29.102288Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:29.102331Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
t 9437184 on unit CompleteOperation 2026-01-10T12:23:42.184554Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:37] at 9437184 is DelayComplete 2026-01-10T12:23:42.184580Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:37] at 9437184 executing on unit CompleteOperation 2026-01-10T12:23:42.184606Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:37] at 9437184 to execution unit CompletedOperations 2026-01-10T12:23:42.184654Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:37] at 9437184 on unit CompletedOperations 2026-01-10T12:23:42.184687Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:37] at 9437184 is Executed 2026-01-10T12:23:42.184707Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:37] at 9437184 executing on unit CompletedOperations 2026-01-10T12:23:42.184739Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:37] at 9437184 has finished 2026-01-10T12:23:42.184761Z node 32 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:23:42.184784Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:23:42.184808Z node 32 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:23:42.184834Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:23:42.184863Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:361: Check active operation [1000004:5] at 9437184 on unit LoadAndWaitInRS 2026-01-10T12:23:42.184890Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:378: Active operation [1000004:5] at 9437184 is not ready for LoadAndWaitInRS 2026-01-10T12:23:42.227815Z node 32 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:42.227880Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2026-01-10T12:23:42.227932Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:42.227990Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:23:42.228026Z node 32 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:42.228288Z node 32 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:42.228316Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2026-01-10T12:23:42.228359Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:42.228388Z node 32 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
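The DataShardTxOrder trace is easiest to read as an operation advancing through a sequence of named execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, ...), where each unit reports a status such as Executed, DelayComplete, or "is not ready to execute", and deferred work is finished later in the "Complete execution for ... on unit ..." records. The toy sketch below mirrors only that reading, with unit names and outcomes copied from the [0:1] operation above; it is not the datashard implementation.

```python
from enum import Enum, auto

class Status(Enum):
    # A subset of the execution statuses printed in the trace above.
    EXECUTED = auto()        # "is Executed" / "ExecutedNoMoreRestarts"
    DELAY_COMPLETE = auto()  # "is DelayComplete..." -- completion is deferred
    NOT_READY = auto()       # "is not ready to execute" -- parked until an event

# Unit sequence observed for operation [0:1] in the trace above.
UNITS = ["CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"]

def advance(execute):
    """Push one operation through UNITS, collecting units whose completion was
    deferred and finishing them in a second pass, which corresponds to the
    'Complete execution for ... on unit ...' records in the trace."""
    delayed = []
    for unit in UNITS:
        status = execute(unit)
        print(f"Trying to execute on unit {unit} -> {status.name}")
        if status is Status.NOT_READY:
            print(f"parked on {unit}, waiting for an external event (e.g. TEvPlanStep)")
            break
        if status is Status.DELAY_COMPLETE:
            delayed.append(unit)
    for unit in delayed:
        print(f"Complete execution on unit {unit}")

if __name__ == "__main__":
    # Outcomes copied from the [0:1] trace: StoreSchemeTx and FinishPropose defer
    # their completion, WaitForPlan stays not ready until the plan step arrives.
    outcomes = {"CheckSchemeTx": Status.EXECUTED,
                "StoreSchemeTx": Status.DELAY_COMPLETE,
                "FinishPropose": Status.DELAY_COMPLETE,
                "WaitForPlan": Status.NOT_READY}
    advance(outcomes.__getitem__)
```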
2026-01-10T12:23:42.228587Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:23:42.228630Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:42.228666Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2026-01-10T12:23:42.260026Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [32:350:2317], Recipient [32:806:2732]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 2 Flags# 0} 2026-01-10T12:23:42.260119Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:23:42.260165Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437184 source 9437185 dest 9437184 producer 9437185 txId 5 2026-01-10T12:23:42.260256Z node 32 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 2 Flags# 0} 2026-01-10T12:23:42.260312Z node 32 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:5] from=9437185 to=9437184origin=9437185 2026-01-10T12:23:42.260392Z node 32 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 resending delayed RS 2026-01-10T12:23:42.262048Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [32:806:2732], Recipient [32:806:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:42.262106Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:42.262167Z node 32 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:23:42.262209Z node 32 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:23:42.262249Z node 32 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:5] at 9437184 for LoadAndWaitInRS 2026-01-10T12:23:42.262284Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:5] at 9437184 on unit LoadAndWaitInRS 2026-01-10T12:23:42.262328Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:5] at 9437184 is Executed 2026-01-10T12:23:42.262367Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:5] at 9437184 executing on unit LoadAndWaitInRS 2026-01-10T12:23:42.262404Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:5] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:23:42.262437Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:5] at 9437184 on unit BlockFailPoint 2026-01-10T12:23:42.262468Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:5] at 9437184 is Executed 2026-01-10T12:23:42.262493Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: 
Advance execution plan for [1000004:5] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:23:42.262522Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:5] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:23:42.262552Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:5] at 9437184 on unit ExecuteDataTx 2026-01-10T12:23:42.263279Z node 32 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000004:5] at tablet 9437184 with status COMPLETE 2026-01-10T12:23:42.263347Z node 32 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000004:5] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 11} 2026-01-10T12:23:42.263409Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:5] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:23:42.263442Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:5] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:23:42.263481Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:5] at 9437184 to execution unit CompleteOperation 2026-01-10T12:23:42.263515Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:5] at 9437184 on unit CompleteOperation 2026-01-10T12:23:42.263749Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:5] at 9437184 is DelayComplete 2026-01-10T12:23:42.263786Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:5] at 9437184 executing on unit CompleteOperation 2026-01-10T12:23:42.263818Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:5] at 9437184 to execution unit CompletedOperations 2026-01-10T12:23:42.263850Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:5] at 9437184 on unit CompletedOperations 2026-01-10T12:23:42.263890Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:5] at 9437184 is Executed 2026-01-10T12:23:42.263916Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:5] at 9437184 executing on unit CompletedOperations 2026-01-10T12:23:42.263945Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:5] at 9437184 has finished 2026-01-10T12:23:42.263981Z node 32 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:23:42.264011Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:23:42.264046Z node 32 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:23:42.264097Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:23:42.276534Z node 32 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:42.276597Z node 32 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2026-01-10T12:23:42.276664Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:810: 
Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [32:103:2137], exec latency: 49 ms, propose latency: 50 ms 2026-01-10T12:23:42.276742Z node 32 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-10T12:23:42.276788Z node 32 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:42.277113Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-10T12:23:42.277164Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:42.277208Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 31 27 29 30 31 30 30 31 30 24 31 29 18 31 25 30 30 29 9 31 16 9 26 2 16 26 23 8 23 - 26 - actual 31 27 29 30 31 30 30 31 30 24 31 29 18 31 25 30 30 29 9 31 16 9 26 2 16 26 23 8 23 - 26 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq [GOOD] >> IndexBuildTest::CancellationNoTable >> TExportToS3Tests::CorruptedDyNumber |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath [GOOD] >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> TExportToS3Tests::ReplicationExportWithStaticCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2026-01-10T12:23:36.353147Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.364113Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.367761Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.370538Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.370979Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.371222Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371247Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371781Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.382442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.382553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.384169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.384342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384456Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-10T12:23:36.400221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.400351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.426167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.426282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.426402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.426483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.426609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.426672Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.426707Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.426759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.437519Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.437660Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.448435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.448562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:36.449910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:36.449971Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:36.450224Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:36.452234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:36.477778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-10T12:23:36.485546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-10T12:23:36.486226Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:206:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:206:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:206:2077] 2026-01-10T12:23:38.591425Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:38.592425Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:38.592641Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:38.593887Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:38.594230Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:38.594477Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.594515Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:38.594730Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:38.603521Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:38.603683Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:38.603817Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:38.603942Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.604043Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:38.604126Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-10T12:23:38.615483Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:38.615630Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.640652Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:38.640773Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.640870Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:38.640977Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.641099Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:38.641154Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.641192Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:38.641251Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.652059Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:38.652330Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.663017Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:38.663142Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:38.664396Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:38.664452Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:38.664723Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:38.664804Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:38.665489Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 
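Interleaved with these traces, the stream also carries scheduler progress markers of the form "|93.7%| [TM] {BAZEL_UPLOAD} ydb/.../unittest" and verdict lines of the form ">> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD]". The companion sketch below pulls those out; like the earlier ones it is illustrative rather than part of the captured output, and both patterns are assumptions inferred only from the marker and verdict lines visible in this log (non-GOOD statuses are matched generically rather than enumerated).

```python
import re
import sys

# Assumed shapes, taken from lines in this log:
#   verdict:  ">> BasicUsage::ConflictingWrites [GOOD]"
#   progress: "|93.7%| [TM] {asan, default-linux-x86_64, release} ydb/.../unittest"
# Note: the "------- [TM] ... >> Test [GOOD] Test command err:" headers repeat a
# verdict, so the counts are approximate.
VERDICT = re.compile(r">>\s+(?P<test>[\w:.]+)\s+\[(?P<status>[A-Z_]+)\]")
PROGRESS = re.compile(r"\|\s*(?P<pct>\d+(?:\.\d+)?)%\|")

def report(text: str) -> None:
    verdicts = VERDICT.findall(text)
    non_good = [(test, status) for test, status in verdicts if status != "GOOD"]
    print(f"verdict lines: {len(verdicts)}, non-GOOD: {len(non_good)}")
    for test, status in non_good:
        print(f"  {status}: {test}")
    percentages = [float(p) for p in PROGRESS.findall(text)]
    if percentages:
        print(f"last reported progress: {percentages[-1]:.1f}%")

if __name__ == "__main__":
    report(sys.stdin.read())
```

Together with the per-component summary above, this reduces a run of this size to a short digest before diving into individual traces.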
2026-01-10T12:23:38.666583Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# ... ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-10T12:23:40.748797Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-10T12:23:40.749438Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-10T12:23:40.750148Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-10T12:23:40.750888Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-10T12:23:40.751615Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-10T12:23:40.752454Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2026-01-10T12:23:40.753135Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-10T12:23:40.753872Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-10T12:23:40.754570Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-10T12:23:40.755367Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-10T12:23:40.756192Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-10T12:23:40.756981Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-10T12:23:40.757724Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-10T12:23:40.758544Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:204:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:204:2077] Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:227:2066] recipient: [31:204:2077] 2026-01-10T12:23:42.751337Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:42.752055Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:42.752301Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:42.753303Z 
node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:42.753677Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:42.753849Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.753875Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:42.754010Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:42.761065Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:42.761174Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:42.761261Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:42.761370Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.761428Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:42.761468Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:247:2066] recipient: [31:20:2067] 2026-01-10T12:23:42.772567Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:42.772689Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.796269Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:42.796373Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.796452Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:42.796512Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.796600Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:42.796648Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.796673Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:42.796710Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.807263Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:42.807371Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.818007Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:42.818123Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:42.819256Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:42.819304Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:42.819484Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:42.819543Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:42.820060Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2026-01-10T12:23:42.821130Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2026-01-10T12:23:42.821783Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2026-01-10T12:23:42.822370Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2026-01-10T12:23:42.823048Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2026-01-10T12:23:42.823732Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-10T12:23:42.824374Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-10T12:23:42.825025Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-10T12:23:42.825638Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-10T12:23:42.826391Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-10T12:23:42.827105Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-10T12:23:42.827896Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 
2026-01-10T12:23:42.828635Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-10T12:23:42.829323Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-10T12:23:42.830041Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-10T12:23:42.830773Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-10T12:23:42.831473Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-10T12:23:42.832064Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-10T12:23:42.832636Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-10T12:23:42.833267Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> IndexBuildTest::CancellationNoTable [GOOD] >> IndexBuildTest::CancellationNoTableUniq >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> BsControllerConfig::ReassignGroupDisk [GOOD] >> YdbTableSplit::SplitByLoadWithReads >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::IndexMaterializationGlobal [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> TExportToS3Tests::ReplicationExportWithStaticCredentials [GOOD] >> YdbTableSplit::RenameTablesAndSplit >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> TExportToS3Tests::IndexMaterializationGlobalAsync >> TExportToS3Tests::ReplicationExportWithOAuthCredentials >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2026-01-10T12:23:42.861246Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707573984194730:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:42.861304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:43.080969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:43.137373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-10T12:23:43.137543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:43.141964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:43.188595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:43.359704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:43.480092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:43.480166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:43.480177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:43.480248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:43.859277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:43.864362Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7593707578279162822:2491] Handshake: worker# [1:7593707578279162820:2489] 2026-01-10T12:23:43.864545Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7593707578279162822:2491] Create read session: session# [1:7593707578279162823:2492] 2026-01-10T12:23:43.868026Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2026-01-10T12:23:36.353108Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.364184Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.367801Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.370671Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.371106Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.371368Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371397Z 
node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.371785Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.382029Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.382150Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.384152Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.384366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.384532Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:248:2066] recipient: [1:20:2067] 2026-01-10T12:23:36.400218Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.400362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.425884Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.425995Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.426073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.426155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.426271Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.426325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.426368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.426419Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.437042Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.437178Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 
2026-01-10T12:23:36.447885Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.448026Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:36.449839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:36.449916Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:36.450116Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:36.452244Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:36.478806Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2026-01-10T12:23:36.479489Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-10T12:23:36.479537Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-10T12:23:36.479554Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-10T12:23:36.479571Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-10T12:23:36.479596Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-10T12:23:36.479624Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-10T12:23:36.479653Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-10T12:23:36.479669Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-10T12:23:36.479699Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-10T12:23:36.479721Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-10T12:23:36.479755Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-10T12:23:36.479777Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-10T12:23:36.479793Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-10T12:23:36.479807Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-10T12:23:36.479841Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-10T12:23:36.479858Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-10T12:23:36.479888Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-10T12:23:36.479909Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-10T12:23:36.479923Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-10T12:23:36.479937Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-10T12:23:36.479950Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-10T12:23:36.479962Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-10T12:23:36.479988Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-10T12:23:36.480002Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-10T12:23:36.480015Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-10T12:23:36.480038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-10T12:23:36.480065Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-10T12:23:36.480103Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-10T12:23:36.480117Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-10T12:23:36.480131Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:206:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:206:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:206:2077] 2026-01-10T12:23:38.614193Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:38.615095Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:38.615363Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:38.616721Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:38.617168Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# 
NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:38.617424Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNo ... Port: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:23:40.843718Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2026-01-10T12:23:40.843772Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2026-01-10T12:23:40.843816Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2026-01-10T12:23:40.843847Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2026-01-10T12:23:40.843879Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 25:1000 Path# /dev/disk 2026-01-10T12:23:40.843908Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 26:1000 Path# /dev/disk 2026-01-10T12:23:40.843935Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 27:1000 Path# /dev/disk 2026-01-10T12:23:40.843960Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 28:1000 Path# /dev/disk 2026-01-10T12:23:40.843984Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 29:1000 Path# /dev/disk 2026-01-10T12:23:40.844008Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 30:1000 Path# /dev/disk 2026-01-10T12:23:40.844050Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 31:1000 Path# /dev/disk 2026-01-10T12:23:40.844107Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 32:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-10T12:23:40.865321Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:265:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:265:2079] Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:293:2068] recipient: [33:265:2079] 2026-01-10T12:23:43.418393Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 
Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:43.419487Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:43.419789Z node 33 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:43.421459Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:43.421650Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:43.421867Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:43.421895Z node 33 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:43.422142Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:43.432526Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:43.432711Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:43.432836Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:43.433004Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:43.433139Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:43.433278Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:314:2068] recipient: [33:22:2069] 2026-01-10T12:23:43.444984Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:43.445163Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:43.470973Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:43.471124Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:43.471205Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:43.471281Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:43.471423Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:43.471489Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:43.471543Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:43.471610Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:43.482381Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:43.482510Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:43.493273Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:43.493399Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:43.494785Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:43.494860Z node 33 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:43.495085Z node 33 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:43.495133Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:43.495867Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:23:43.496270Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 33:1000 Path# /dev/disk 2026-01-10T12:23:43.496303Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 34:1000 Path# /dev/disk 2026-01-10T12:23:43.496332Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 35:1000 Path# /dev/disk 2026-01-10T12:23:43.496360Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 36:1000 Path# /dev/disk 2026-01-10T12:23:43.496377Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 37:1000 Path# /dev/disk 
2026-01-10T12:23:43.496402Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 38:1000 Path# /dev/disk 2026-01-10T12:23:43.496426Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 39:1000 Path# /dev/disk 2026-01-10T12:23:43.496443Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 40:1000 Path# /dev/disk 2026-01-10T12:23:43.496458Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 41:1000 Path# /dev/disk 2026-01-10T12:23:43.496472Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 42:1000 Path# /dev/disk 2026-01-10T12:23:43.496486Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 43:1000 Path# /dev/disk 2026-01-10T12:23:43.496504Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 44:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-10T12:23:43.515891Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> IndexBuildTest::CancellationNoTableUniq [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex >> YdbTableSplit::SplitByLoadWithUpdates >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::ReplicationExportWithOAuthCredentials [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query >> TExportToS3Tests::ReplicationExportMultipleItems >> TExportToS3Tests::IndexMaterializationGlobalAsync [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> OperationMapping::IndexBuildRejected [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData [GOOD] >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [GOOD] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TReplicationTests::Disabled >> TExportToS3Tests::ShouldSucceedOnConcurrentExport |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} 
ydb/core/grpc_services/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TExportToS3Tests::ReplicationExportMultipleItems [GOOD] >> TReplicationTests::Create >> TExportToS3Tests::ReplicationExportGlobalConsistency >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> TxUsage::ReadRuleGeneration [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> ReadOnlyVDisk::TestSync [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::CancelBuildUniq ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2026-01-10T12:22:35.429804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:35.430858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:35.430904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:35.441638Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:35.441974Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:35.442250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:35.485560Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:35.493162Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:35.493603Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:35.494736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:35.494787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:35.494820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:35.495058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:35.495119Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:35.495167Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:35.568266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:35.597005Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:35.597199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:35.597309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:35.597371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 
2026-01-10T12:22:35.597416Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:35.597456Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.597724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.597782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.598096Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:35.598224Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:35.598297Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.598349Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:35.598389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:35.598429Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:35.598485Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:35.598523Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:35.598567Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:35.598670Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.598709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.598790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:35.606024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:35.606107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:35.606207Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:35.606402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:35.606469Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:35.606534Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:35.606591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:35.606625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:35.606663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:35.606695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.607039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:35.607094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:35.607134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:35.607170Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.607243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:35.607275Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:35.607307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:35.607338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:35.607366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:35.620653Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:35.620734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:35.620770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:35.620815Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:35.620894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:35.621385Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.621445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:35.621479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:35.621538Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 
2026-01-10T12:22:35.621566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:35.621654Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:35.621681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:35.621713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:35.621736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:35.624554Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:35.624610Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:35.624767Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.624794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:35.624844Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:35.624883Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:35.624913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:35.624946Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:35.624973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
pient [32:350:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-10T12:23:47.750457Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.750490Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-10T12:23:47.750575Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-10T12:23:47.750610Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.750642Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-10T12:23:47.750729Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-10T12:23:47.750763Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.750795Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-10T12:23:47.750878Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-10T12:23:47.750910Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.750943Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-10T12:23:47.751044Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-10T12:23:47.751079Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751109Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-10T12:23:47.751177Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-10T12:23:47.751212Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751244Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-10T12:23:47.751334Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-10T12:23:47.751370Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751401Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-10T12:23:47.751487Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-10T12:23:47.751521Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751551Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-10T12:23:47.751649Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-10T12:23:47.751681Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751712Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-10T12:23:47.751796Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-10T12:23:47.751830Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.751862Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-10T12:23:47.751950Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-10T12:23:47.751983Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752013Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-10T12:23:47.752121Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-10T12:23:47.752158Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752189Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-10T12:23:47.752283Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-10T12:23:47.752317Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752349Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-10T12:23:47.752433Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-10T12:23:47.752467Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752501Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-10T12:23:47.752581Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-10T12:23:47.752615Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752648Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-10T12:23:47.752729Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-10T12:23:47.752763Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752795Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-10T12:23:47.752881Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-10T12:23:47.752916Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.752946Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-10T12:23:47.753029Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-10T12:23:47.753063Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.753094Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-10T12:23:47.753180Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-10T12:23:47.753213Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.753247Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-10T12:23:47.753332Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:806:2732], Recipient [32:350:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:23:47.753368Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:47.753403Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 28 29 30 31 31 27 31 31 31 27 22 30 29 30 31 22 31 27 22 27 31 30 - 14 30 - - - - - - - actual 28 29 30 31 31 27 31 31 31 27 22 30 29 30 31 22 31 27 22 27 31 30 - 14 30 - - - - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> OperationMapping::IndexBuildCanceled [GOOD] >> TExportToS3Tests::ReplicationExportGlobalConsistency [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2026-01-10T12:23:37.555779Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:37.556583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:37.556617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:37.564297Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:37.564579Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:23:37.564789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:23:37.596630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:37.603304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:23:37.603799Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:23:37.605038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:23:37.605093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:23:37.605125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:23:37.605377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:23:37.605460Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:23:37.605515Z node 1 
:TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:23:37.673335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:23:37.700330Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:23:37.700513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:23:37.700599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:23:37.700645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:23:37.700676Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:23:37.700704Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:37.700913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:37.700959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:37.701209Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:23:37.701290Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:23:37.701332Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:23:37.701371Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:23:37.701404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:23:37.701431Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:23:37.701471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:23:37.701498Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:23:37.701529Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:37.701592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:23:37.701618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:23:37.701671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:23:37.710360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:23:37.710439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:23:37.710541Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:23:37.710704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:23:37.710764Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:23:37.710815Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:23:37.710863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:23:37.710898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:23:37.710930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:23:37.710956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:23:37.711205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:23:37.711240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:23:37.711270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:23:37.711295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:23:37.711350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:23:37.711372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:23:37.711398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:23:37.711422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:23:37.711441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:23:37.723557Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:23:37.723644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:23:37.723684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:23:37.723752Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:23:37.723821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 
not sending time cast registration request in state WaitScheme 2026-01-10T12:23:37.724381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:23:37.724447Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:23:37.724491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:23:37.724557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:23:37.724584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:23:37.724715Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:23:37.724753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:23:37.724785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:23:37.724815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:23:37.727622Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:23:37.727681Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:37.727889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:37.727930Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:23:37.727978Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:23:37.728012Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:23:37.728045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:23:37.728108Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:23:37.728151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:23:48.514525Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.514591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.514634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.514677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-10T12:23:48.514702Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.514818Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:23:48.514843Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:23:48.514864Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:23:48.514889Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.514929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.514974Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.515017Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-10T12:23:48.515041Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.515180Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-10T12:23:48.515208Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.515232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.515269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.515313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-10T12:23:48.515348Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.515448Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.515474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.515527Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 511] from 
9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.515599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-10T12:23:48.515626Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.515736Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.515782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.515836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.515884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-10T12:23:48.515911Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.516029Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.516068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:516] at 9437184 on unit FinishPropose 2026-01-10T12:23:48.516135Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-10T12:23:48.516248Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.516405Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.516435Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.516482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-10T12:23:48.516548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-10T12:23:48.516583Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.516698Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:23:48.516728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2026-01-10T12:23:48.516763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:23:48.516798Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:23:48.516987Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-10T12:23:48.517032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.517096Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2026-01-10T12:23:48.517441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-10T12:23:48.517494Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.517553Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2026-01-10T12:23:48.517734Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-10T12:23:48.517770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.517823Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2026-01-10T12:23:48.517998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-10T12:23:48.518037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.518062Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2026-01-10T12:23:48.518177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-10T12:23:48.518208Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.518233Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2026-01-10T12:23:48.518417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-10T12:23:48.518452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.518493Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2026-01-10T12:23:48.518660Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-10T12:23:48.518693Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.518717Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2026-01-10T12:23:48.518836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-10T12:23:48.518869Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:23:48.518894Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> RemoteTopicReader::ReadTopic [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree [GOOD] >> TExportToS3Tests::ReplicatedTableExport >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 17051767092259785640 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2026-01-10T12:23:33.979904Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8828:948] 2026-01-10T12:23:33.980458Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8835:955] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-10T12:23:36.201769Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8842:962] 2026-01-10T12:23:36.201934Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8835:955] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-10T12:23:40.240044Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8856:976] 2026-01-10T12:23:40.240148Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8849:969] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-10T12:23:42.449878Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8863:983] 2026-01-10T12:23:42.450028Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8856:976] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-10T12:23:44.896427Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8870:990] 2026-01-10T12:23:44.896521Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8863:983] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting 
VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2026-01-10T12:23:47.396738Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8870:990] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] >> TReplicationTests::Disabled [GOOD] >> TReplicationTests::CreateSequential >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2026-01-10T12:23:42.859352Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707577425353497:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:42.859548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:43.114489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:43.114616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:43.160178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:43.167763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:43.177343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:43.359624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:43.480248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:43.480279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:43.480286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:43.480368Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:43.828615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:43.868195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:44.036749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:45.397244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707590310256471:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.397253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707590310256470:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.397322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707590310256458:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.397449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.397805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707590310256479:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.397880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:45.402619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:45.456287Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707590310256480:2630] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:23:45.460799Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707590310256477:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:23:45.460803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707590310256478:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:23:45.531807Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707590310256528:2662] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:45.545666Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707590310256546:2670] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:46.620635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:46.937177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:47.359390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:23:47.729174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:23:47.859815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707577425353497:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:47.859904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:23:48.100358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:23:49.101593Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handshake: worker# [1:7593707581720321580:2488] 2026-01-10T12:23:49.106765Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Create read session: session# [1:7593707607490126385:2487] 2026-01-10T12:23:49.107068Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:23:49.122183Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_3677051503692324365_v1 } } 2026-01-10T12:23:49.128103Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2026-01-10T12:23:48.999000Z WriteTime: 2026-01-10T12:23:49.002000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-10T12:23:49.128467Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:23:49.212525Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593707607490126384:2965] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-10T12:23:49.207000Z WriteTime: 2026-01-10T12:23:49.208000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-10T12:23:49.308426Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7593707607490126474:2997] Handshake: worker# [1:7593707581720321580:2488] 2026-01-10T12:23:49.312354Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7593707607490126474:2997] Create read session: session# [1:7593707607490126475:2487] 2026-01-10T12:23:49.312555Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7593707607490126474:2997] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-10T12:23:49.318347Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7593707607490126474:2997] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_101147984758026044_v1 } } 2026-01-10T12:23:49.320134Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7593707607490126474:2997] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-10T12:23:49.207000Z WriteTime: 2026-01-10T12:23:49.208000Z MessageGroupId: producer ProducerId: producer }] } } >> TExportToS3Tests::ReplicatedTableExport [GOOD] |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-10T12:23:41.149353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.240746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.240816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.255967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.256104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.256307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.264549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.264767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:41.265492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.265693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.267892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.268052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-10T12:23:41.273146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.273363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.273431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.274147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.430245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.432046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.432156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.432253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.432348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.432430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 26-01-10T12:23:49.884487Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-10T12:23:49.884514Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:23:49.884571Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-10T12:23:49.886434Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:23:49.886483Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-10T12:23:49.886527Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:23:49.887204Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-10T12:23:49.887317Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-01-10T12:23:49.887638Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:49.887766Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 25769805935 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:49.887819Z node 
6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-10T12:23:49.887953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-10T12:23:49.888032Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-10T12:23:49.888104Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:23:49.888167Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-10T12:23:49.888211Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:23:49.888270Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:49.888338Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:23:49.888385Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-10T12:23:49.888442Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:23:49.888495Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-10T12:23:49.888539Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-10T12:23:49.888613Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:23:49.888661Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-10T12:23:49.888710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:23:49.888754Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-10T12:23:49.889475Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.889588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.890967Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:49.891016Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:49.891184Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:23:49.891307Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:49.891345Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:212:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-10T12:23:49.891389Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:212:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-10T12:23:49.892140Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.892227Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.892265Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:23:49.892313Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:23:49.892357Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:23:49.892676Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.892729Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.892750Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:23:49.892773Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:23:49.892796Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:23:49.892856Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-10T12:23:49.892910Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2153] 2026-01-10T12:23:49.893375Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:23:49.893412Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:23:49.893474Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:49.894900Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.895909Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:23:49.896007Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:23:49.896056Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:23:49.896535Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:23:49.897752Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-10T12:23:49.897815Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [6:919:2877] TestWaitNotification: OK eventTxId 112 >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [GOOD] |93.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2026-01-10T12:20:50.027142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706837701190732:2257];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.042039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.146223Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.544312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.555638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.555744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.694166Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.696405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.710026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706833406223208:2081] 1768047649975759 != 1768047649975762 2026-01-10T12:20:50.848458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.032335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.104286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001a3e/r3tmp/yandexUvgFKq.tmp 2026-01-10T12:20:51.104312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001a3e/r3tmp/yandexUvgFKq.tmp 2026-01-10T12:20:51.106719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001a3e/r3tmp/yandexUvgFKq.tmp 2026-01-10T12:20:51.106810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.345780Z INFO: TTestServer started on Port 65363 GrpcPort 22049 PQClient connected to localhost:22049 2026-01-10T12:20:51.742751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.749046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, 
at schemeshard: 72057594046644480 2026-01-10T12:20:51.758060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.830430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.040989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:52.052277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.105868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706854881060752:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.110306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706854881060716:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.110428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.110860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706854881060781:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.110922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.111413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.133360Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706854881060754:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.211733Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706854881060820:2648] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.718467Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706854881060828:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.729214Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTc3MzQ0MTYtOTBkOTkyZTgtYjA0YTk1NDAtYmEzZWRhZTE=, ActorId: [1:7593706854881060711:2328], ActorState: ExecuteState, LegacyTraceId: 01kekxmd6qbyesk034y3r0zt82, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.735541Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.776047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.820668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.904752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.025920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706837701190732:2257];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.026043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706859176028411:2827] === CheckClustersList. Ok 2026-01-10T12:21:01.645936Z :CreateTopicWithStreamingConsumer INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.681566Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.706402Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706884945832415:2938] connected; active server actors: 1 2026-01-10T12:21:01.706631Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][topic_name] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.710978Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][topic_name] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.711114Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][topic_name] BALANCER INIT DONE for topic_name: (0, 72075186224037892) 2026-01-10T12:21:01.714379Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:0 ... 2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:48.570875Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:23:48.570915Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:48.634874Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:48.634919Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.634941Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:48.634970Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.634992Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:48.735219Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:48.735256Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.735274Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:48.735297Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.735325Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:48.835617Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:48.835673Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.835697Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action 
and tx pending commits 2026-01-10T12:23:48.835727Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.835752Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:48.860601Z :INFO: [/Root] [/Root] [4a446f2-ca1ef02b-7d263b0-d5231153] Closing read session. Close timeout: 0.000000s 2026-01-10T12:23:48.860674Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2026-01-10T12:23:48.860737Z :INFO: [/Root] [/Root] [4a446f2-ca1ef02b-7d263b0-d5231153] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:23:48.860876Z :NOTICE: [/Root] [/Root] [4a446f2-ca1ef02b-7d263b0-d5231153] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:23:48.860934Z :DEBUG: [/Root] [/Root] [4a446f2-ca1ef02b-7d263b0-d5231153] [] Abort session to cluster 2026-01-10T12:23:48.861567Z :DEBUG: [/Root] 0x00007D940FEF9190 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_11715414696017677048_v1 Close 2026-01-10T12:23:48.861743Z :DEBUG: [/Root] 0x00007D940FEF9190 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_11715414696017677048_v1 Close 2026-01-10T12:23:48.861902Z :NOTICE: [/Root] [/Root] [4a446f2-ca1ef02b-7d263b0-d5231153] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:23:48.862388Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 grpc read done: success# 0, data# { } 2026-01-10T12:23:48.862424Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 grpc read failed 2026-01-10T12:23:48.862464Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 3 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 closed 2026-01-10T12:23:48.862655Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 is DEAD 2026-01-10T12:23:48.863333Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7593707591423916148:2521]: session cookie 4 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 grpc read done: success# 0, data# { } 2026-01-10T12:23:48.863374Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7593707591423916148:2521]: session cookie 4 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1grpc read failed 2026-01-10T12:23:48.863421Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7593707591423916148:2521]: session cookie 4 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 grpc closed 2026-01-10T12:23:48.863461Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7593707591423916148:2521]: session cookie 4 consumer consumer-1 session consumer-1_14_3_11715414696017677048_v1 proxy is DEAD 2026-01-10T12:23:48.864350Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-10T12:23:48.864330Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session consumer-1_14_3_11715414696017677048_v1 2026-01-10T12:23:48.864418Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:23:48.864498Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:23:48.864389Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7593707591423916142:2519] destroyed 2026-01-10T12:23:48.864436Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][test-topic] pipe [14:7593707591423916139:2516] disconnected. 
2026-01-10T12:23:48.864472Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][test-topic] pipe [14:7593707591423916139:2516] disconnected; active server actors: 1 2026-01-10T12:23:48.864514Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][test-topic] pipe [14:7593707591423916139:2516] client consumer-1 disconnected session consumer-1_14_3_11715414696017677048_v1 2026-01-10T12:23:48.864683Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer-1_14_3_11715414696017677048_v1 2026-01-10T12:23:48.865392Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:23:48.865457Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:23:48.865521Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:23:48.865586Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2026-01-10T12:23:48.865660Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:23:48.865619Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0 grpc read done: success: 0 data: 2026-01-10T12:23:48.865646Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0 grpc read failed 2026-01-10T12:23:48.865696Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0 grpc closed 2026-01-10T12:23:48.865714Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|e540ab1c-f47f2524-dd22d65d-15f4cc4a_0 is DEAD 2026-01-10T12:23:48.866844Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:23:48.867152Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7593707582833981408:2479] destroyed 2026-01-10T12:23:48.867238Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:23:48.867276Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:48.867310Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.867332Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:48.867367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.867388Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:48.935942Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:48.935984Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.936007Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:48.936035Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:48.936059Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:49.036291Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:49.036328Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:49.036345Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:49.036367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:49.036398Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::ConsistencyLevel |93.8%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] >> TExportToS3Tests::IndexMaterializationTwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ReplicatedTableExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:41.149362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.242691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.242749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.256134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.256237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.256393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.263965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.264201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-10T12:23:41.264857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.265078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.267137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.267303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:41.273151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.273321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.273385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.274130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.429720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.430635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.430775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.430883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.430951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.431747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 4046678944, LocalPathId: 1], version: 8 2026-01-10T12:23:51.399493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:23:51.401160Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:51.401262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:23:51.401318Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:23:51.401357Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:23:51.401396Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:23:51.401502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-10T12:23:51.403047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 
OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1202 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:23:51.403102Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:23:51.403246Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1202 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:23:51.403389Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1202 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:23:51.404598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 30064773771 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:23:51.404644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:23:51.404756Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 30064773771 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:23:51.404814Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:23:51.404901Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 30064773771 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:23:51.404966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:51.405004Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:23:51.405040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 
72057594046678944 2026-01-10T12:23:51.405077Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:23:51.408823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:23:51.409057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:23:51.409323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:23:51.409556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:23:51.409716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:23:51.409780Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:23:51.409932Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:23:51.409984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:51.410036Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:23:51.410098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:51.410156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:23:51.410247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:741:2719] message: TxId: 101 2026-01-10T12:23:51.410314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:23:51.410376Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:23:51.410422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:23:51.410599Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:23:51.412742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:23:51.412799Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [7:742:2720] TestWaitNotification: OK eventTxId 101 2026-01-10T12:23:51.413240Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:51.413517Z node 7 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 290us result status StatusSuccess 2026-01-10T12:23:51.414106Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval >> TExportToS3Tests::ShouldRetryAtFinalStage >> IndexBuildTest::CancelBuildUniq [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest 
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals >> BasicStatistics::Serverless >> HttpRequest::AnalyzeServerless |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] >> BsControllerConfig::DeleteStoragePool [GOOD] >> BsControllerConfig::CommandRollbackWhenCombined >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:25.513327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:25.629689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:25.638127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:25.638598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:25.638705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:26.058660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:26.171047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:26.171181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:26.207750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:26.296128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:26.954477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:26.955995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:26.956049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:26.956165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:26.956344Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:27.012767Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:27.562296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:30.439443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:30.445193Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:30.447184Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:30.471988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:30.472116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.499758Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:30.501552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.682889Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:30.682991Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:30.684521Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.685070Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.685648Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.686625Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.686998Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.687218Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.687385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.687591Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.687727Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:30.703970Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:30.960198Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:30.995779Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:30.995854Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:31.027492Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:31.029962Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:31.030195Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:31.030326Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:31.030380Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:31.030449Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:31.030506Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:31.030559Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:31.031378Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:31.052847Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:31.052954Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:31.072136Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2166:2620] 2026-01-10T12:21:31.072629Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2166:2620], schemeshard id = 72075186224037897 2026-01-10T12:21:31.131413Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2227:2651] 2026-01-10T12:21:31.134094Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:21:31.150537Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Describe result: PathErrorUnknown 2026-01-10T12:21:31.150607Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Creating table 2026-01-10T12:21:31.150673Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:21:31.167807Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2319:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:31.179466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:31.204918Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:31.205034Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:31.227969Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:31.410437Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:21:31.444421Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:31.735874Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:31.864708Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:31.864816Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2236:2659] Owner: [2:2235:2658]. Column diff is empty, finishing 2026-01-10T12:21:32.556021Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... force traversal operations sent the requests. 2026-01-10T12:22:07.040439Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-10T12:22:07.040521Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.065000s, at schemeshard: 72075186224037899 2026-01-10T12:22:07.040727Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-10T12:22:07.054824Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:22:07.140807Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:22:07.143697Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:07.146680Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:22:07.146830Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], Start read next stream part 2026-01-10T12:22:07.369474Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4886:4154]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:07.369709Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:07.369899Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:22:07.369959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4889:4157] 2026-01-10T12:22:07.370024Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4889:4157] 2026-01-10T12:22:07.370385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4890:4158] 2026-01-10T12:22:07.370488Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4889:4157], server id = [2:4890:4158], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:07.370545Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4890:4158], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:07.370592Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:07.370730Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:07.370793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4886:4154], StatRequests.size() = 1 2026-01-10T12:22:07.370879Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:23:07.849332Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpmha2weatkrvf8fmqgve", SessionId: ydb://session/3?node_id=2&id=MmJmZDRiZDAtZTg4MDhkYy03ZjUzNGExYi01NmZmNmFkOA==, Slow query, duration: 60.697058s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:07.850619Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:07.850825Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], Start read next stream part 2026-01-10T12:23:07.851149Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-10T12:23:07.851242Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:07.853576Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:07.853627Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:4859:4140], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDVkNDRkMi05N2Q4NjkwLTMxYWQ2MjdiLTIzYTk5MTYy, TxId: 2026-01-10T12:23:07.854569Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:07.854670Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], Start read next stream part 2026-01-10T12:23:07.936853Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5019:4274]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:07.937128Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:23:07.937186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5019:4274], StatRequests.size() = 1 2026-01-10T12:23:53.756165Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxrfte04shef5vr1ang8q3", SessionId: ydb://session/3?node_id=2&id=ZTQ2ZWNiOC02YzM1NzQwNi1iNzJkNzdmOS01NmMzZWUxNg==, Slow query, duration: 45.898252s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:53.758525Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:53.758682Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], Start read next stream part 2026-01-10T12:23:53.758907Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33580, txId: 18446744073709551615] shutting down 2026-01-10T12:23:53.759644Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:53.759712Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4849:2462], ActorId: [2:5001:4259], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=ODJjOGE5NjktZTg0YmUyM2ItODI3ZWZmNTAtOTU2NGZkZDc=, TxId: 2026-01-10T12:23:53.761012Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5111:4360], ActorId: [2:5115:4363], Starting query actor #1 [2:5117:4364] 2026-01-10T12:23:53.761104Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5115:4363], ActorId: [2:5117:4364], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-10T12:23:53.765917Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5115:4363], ActorId: [2:5117:4364], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2Q4YWM4MjItMjExMzZhNjAtZTVkMzg1MGItODQzODYyZjY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:23:53.853398Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5128:4375]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:53.853822Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:53.854169Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-10T12:23:53.854247Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:23:53.854435Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:23:53.854522Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5128:4375], StatRequests.size() = 1 2026-01-10T12:23:53.854671Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:23:54.051608Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5115:4363], ActorId: [2:5117:4364], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2Q4YWM4MjItMjExMzZhNjAtZTVkMzg1MGItODQzODYyZjY=, TxId: 2026-01-10T12:23:54.051710Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5115:4363], ActorId: [2:5117:4364], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2Q4YWM4MjItMjExMzZhNjAtZTVkMzg1MGItODQzODYyZjY=, TxId: 2026-01-10T12:23:54.053038Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5111:4360], ActorId: [2:5115:4363], Got response [2:5117:4364] SUCCESS 2026-01-10T12:23:54.053536Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:54.091557Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-10T12:23:54.091644Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3853:3643] >> HttpRequest::Status >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2965:2117] 2026-01-10T12:23:36.690045Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.691656Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.692220Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.694976Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.695796Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.696252Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.696289Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.696716Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.704227Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.704369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.704511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.704611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.704679Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.704724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-10T12:23:36.716216Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.716334Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.758793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.758993Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.759116Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.759246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.759415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.759549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.759596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.759647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.770369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.770471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.781287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.781448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:36.782558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:36.782599Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:36.782771Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:36.782815Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:36.794952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host 
{ Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } } 2026-01-10T12:23:36.796036Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-10T12:23:36.796111Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-10T12:23:36.796131Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-10T12:23:36.796146Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-10T12:23:36.796162Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-10T12:23:36.796181Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-10T12:23:36.796205Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-10T12:23:36.796242Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-10T12:23:36.796268Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-10T12:23:36.796298Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-10T12:23:36.796327Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-10T12:23:36.796348Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-10T12:23:36.796365Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-10T12:23:36.796379Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-10T12:23:36.796398Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-10T12:23:36.796425Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-10T12:23:36.796442Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-10T12:23:36.796456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-10T12:23:36.796476Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-10T12:23:36.796503Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-10T12:23:36.796520Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-10T12:23:36.796534Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk ... 
UG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:56.267268Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:56.267405Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:56.267452Z node 101 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:56.267629Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:56.285474Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:56.285659Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:56.285793Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:56.285916Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:56.286011Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:56.286078Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [101:28:2070] sender: [101:49:2057] recipient: [101:11:2058] 2026-01-10T12:23:56.297756Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:56.297921Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:56.320610Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:56.320776Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:56.320856Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:56.320926Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:56.321064Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:56.321121Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:56.321154Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:56.321212Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:56.332247Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:56.332391Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:56.343116Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:56.343236Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:56.344429Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:56.344481Z node 101 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:56.344679Z node 101 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:56.344738Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:56.345423Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-10T12:23:56.346275Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:29:2057] recipient: [102:24:2068] 2026-01-10T12:23:56.555759Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:56.556686Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:56.556980Z node 102 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:56.558389Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:56.558675Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:56.558883Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:56.558914Z node 102 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:56.559095Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:56.568931Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} 
TTxInitScheme Complete 2026-01-10T12:23:56.569074Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:56.569194Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:56.569301Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:56.569412Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:56.569503Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:49:2057] recipient: [102:11:2058] 2026-01-10T12:23:56.580848Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:56.581019Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:56.602351Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:56.602503Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:56.602590Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:56.602666Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:56.602773Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:56.602839Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:56.602905Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:56.602968Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:56.613760Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:56.613907Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:56.626696Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:56.626835Z node 102 :BS_CONTROLLER 
DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:56.628335Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:56.628391Z node 102 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:56.628615Z node 102 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:56.628665Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:56.631175Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-10T12:23:56.632454Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze >> BasicStatistics::TwoServerlessDbs >> ColumnStatistics::CountMinSketchStatistics >> AnalyzeColumnshard::Analyze [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuildUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2026-01-10T12:21:58.561456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.655599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.655648Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.668670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.668760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.668884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.674525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.674672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.675155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.675304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.676761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.676893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.677770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.677815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.677867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.677899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.677931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.678083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.794772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
fy, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-10T12:23:54.635244Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710760 2026-01-10T12:23:54.635287Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-10T12:23:54.635328Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-10T12:23:54.635352Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-10T12:23:54.635409Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-10T12:23:54.637082Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-10T12:23:54.637129Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-10T12:23:54.637194Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-10T12:23:54.637303Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1559:3422], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-10T12:23:54.639107Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-10T12:23:54.639270Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1559:3422], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:23:54.639338Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-10T12:23:54.641433Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-10T12:23:54.641568Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1559:3422], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-10T12:23:54.641612Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-10T12:23:54.641827Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:23:54.641909Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1658:3510] TestWaitNotification: OK eventTxId 102 2026-01-10T12:23:54.646051Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-10T12:23:54.646322Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { 
source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-10T12:23:54.649621Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:23:54.649963Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 371us result status StatusSuccess 2026-01-10T12:23:54.650531Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:54.655142Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:23:54.655418Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 324us result status StatusPathDoesNotExist 2026-01-10T12:23:54.655648Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:41.149353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.230811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.230875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.243922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.244016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.245802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.255623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.255938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:41.258715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.260807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.266054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.267059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:41.273086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.273234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.273293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.274112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.417815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.418796Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.418949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.419980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
e publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:23:55.940857Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.940940Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.940972Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-10T12:23:55.941003Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 15 2026-01-10T12:23:55.941034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-10T12:23:55.941116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-10T12:23:55.944334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-10T12:23:55.944383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-10T12:23:55.944422Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-10T12:23:55.944601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2026-01-10T12:23:55.944794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000048 FAKE_COORDINATOR: advance: minStep5000048 State->FrontStep: 5000047 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000048 2026-01-10T12:23:55.945974Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000048, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:55.946095Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000048 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:23:55.946156Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710765:0, step: 5000048, at schemeshard: 72057594046678944 2026-01-10T12:23:55.946303Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710765:0, at schemeshard: 72057594046678944 2026-01-10T12:23:55.946419Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-10T12:23:55.946468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-10T12:23:55.946523Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-10T12:23:55.946564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-10T12:23:55.946620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:23:55.946691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-10T12:23:55.946768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: false 2026-01-10T12:23:55.946828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-10T12:23:55.946889Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710765:0 2026-01-10T12:23:55.946940Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710765:0 2026-01-10T12:23:55.947017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-10T12:23:55.947066Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710765, publications: 2, subscribers: 1 2026-01-10T12:23:55.947114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 1], 18 2026-01-10T12:23:55.947157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 44], 18446744073709551615 2026-01-10T12:23:55.948118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.948418Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.950251Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:55.950299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:55.950496Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 44] 2026-01-10T12:23:55.950658Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:55.950696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 1 2026-01-10T12:23:55.950734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 44 FAKE_COORDINATOR: Erasing txId 281474976710765 2026-01-10T12:23:55.951631Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.951731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.951773Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-10T12:23:55.951852Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-10T12:23:55.951916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:23:55.952849Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.952970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.953011Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-10T12:23:55.953044Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-10T12:23:55.953076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-10T12:23:55.953159Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, 
notify & remove, at schemeshard: 72057594046678944, txId: 281474976710765, subscribers: 1 2026-01-10T12:23:55.953216Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-10T12:23:55.957873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.958076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-10T12:23:55.958175Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2026-01-10T12:23:55.958240Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710765 2026-01-10T12:23:55.962897Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:23:55.962971Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:1220:3112] TestWaitNotification: OK eventTxId 103 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:44.093465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:44.162290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:44.169527Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:44.169787Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:44.169841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:44.453811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:44.527780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:44.527874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:44.560576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:44.633735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:45.269594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:45.270913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:45.270958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:45.270988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:45.271143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:45.334596Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:45.839304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:48.449880Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:48.455577Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:48.457604Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:48.488991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:48.489138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.517476Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:48.519262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:48.696355Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:48.696468Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.697724Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.698183Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.698642Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.699380Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.699658Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.699861Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.699966Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.700132Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.700249Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.715227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:48.924189Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:48.956228Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:48.956319Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:48.983827Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:48.985420Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:48.985678Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:48.985751Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:48.985833Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:48.985941Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:48.986006Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:48.986056Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:48.986565Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:49.005436Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:49.005544Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:49.028748Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:49.029651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:49.078680Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:49.081674Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:49.094977Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:49.095031Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:49.095101Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:49.106362Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:49.129893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:49.154560Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:49.154697Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:49.165055Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:49.354795Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:49.372238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:49.646881Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:49.754535Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:49.754621Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:50.468538Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ce pool default not found or you don't have access permissions } 2026-01-10T12:22:16.548257Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4112:3712], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:16.548394Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:16.554525Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4129:3717], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:16.557798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:16.618770Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4111:3711], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:22:16.945348Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4198:3759], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:16.978905Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4197:3758] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:17.205140Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4219:3772]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:17.205443Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:17.205514Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4221:3774] 2026-01-10T12:22:17.205596Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4221:3774] 2026-01-10T12:22:17.205935Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4222:3775] 2026-01-10T12:22:17.206066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4222:3775], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:17.206127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:17.206214Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4221:3774], server id = [2:4222:3775], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:17.206286Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:17.206360Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4219:3772], StatRequests.size() = 1 2026-01-10T12:22:17.206536Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:23:13.757556Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpxpq60403ak3rjwrnqgd", SessionId: ydb://session/3?node_id=2&id=NDc5NDdlZmEtNDk5OTViYWItM2JlODdhN2YtM2ZkNTk4NDc=, Slow query, duration: 57.215630s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:13.758460Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4084:3697], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:13.758621Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4084:3697], Start read next stream part 2026-01-10T12:23:13.758830Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-10T12:23:13.759219Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4074:2462], 
ActorId: [2:4330:3874], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:13.761346Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4084:3697], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:13.761397Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4084:3697], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmY4MzkyLWQ0NDYyMzA4LWEwYjM3OWE3LTllMTQ1MWQy, TxId: 2026-01-10T12:23:13.762174Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:13.762251Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], Start read next stream part 2026-01-10T12:23:13.837121Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4354:3894]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:13.837439Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:23:13.837491Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4354:3894], StatRequests.size() = 1 2026-01-10T12:23:55.969512Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxrnk27rjvxm8cvrq9vj47", SessionId: ydb://session/3?node_id=2&id=MTQxNTQxYzItY2I3MDRkNWEtNDEwOWM3MC1mZTczNjc5, Slow query, duration: 42.204550s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:55.971945Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:55.972114Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], Start read next stream part 2026-01-10T12:23:55.972575Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-10T12:23:55.973061Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4442:3977], ActorId: [2:4443:3978], Starting query actor #1 [2:4444:3979] 2026-01-10T12:23:55.973145Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4443:3978], ActorId: [2:4444:3979], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:23:55.977810Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4443:3978], ActorId: [2:4444:3979], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MWIzMzlhMTQtMjQ2OGQ1NGItZjBjYzZhMmMtNjExZmRkNw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:23:55.978493Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:55.978594Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4330:3874], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTBkMTA4OWMtZDA2NmQ4YjgtZGU0N2RlODItOTFhMjQ0OTE=, TxId: 2026-01-10T12:23:56.052616Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4461:3993]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:56.052973Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:56.053031Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4461:3993], StatRequests.size() = 1 2026-01-10T12:23:56.226749Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4443:3978], ActorId: [2:4444:3979], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWIzMzlhMTQtMjQ2OGQ1NGItZjBjYzZhMmMtNjExZmRkNw==, TxId: 2026-01-10T12:23:56.226826Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4443:3978], ActorId: [2:4444:3979], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWIzMzlhMTQtMjQ2OGQ1NGItZjBjYzZhMmMtNjExZmRkNw==, TxId: 2026-01-10T12:23:56.227142Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4442:3977], ActorId: [2:4443:3978], Got response [2:4444:3979] SUCCESS 2026-01-10T12:23:56.227389Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:56.266806Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:23:56.266892Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3153:3434] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] >> HttpRequest::ProbeBaseStats >> BasicStatistics::NotFullStatisticsDatashard >> BasicStatistics::Simple >> ColumnStatistics::CountMinSketchServerlessStatistics >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] >> BasicStatistics::TwoDatabases >> ColumnStatistics::EqWidthHistogramSmallParamTypes >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:42.492034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:42.568731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:42.574467Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:42.574768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:42.574828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:42.862827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:42.942954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:42.943074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:42.977057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.075438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:43.667162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:43.668150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:43.668185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:43.668212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:43.668317Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:43.730421Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:44.248984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:46.700098Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.708139Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:46.710863Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:46.743843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:46.743963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.772583Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:46.774621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:46.945876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:46.946005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.947498Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.948109Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.948735Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.949619Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.950032Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.950232Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.950441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.950670Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.950815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.966362Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.168188Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.199534Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:47.199635Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:47.230447Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:47.231607Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:47.231762Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:47.231818Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:47.231861Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:47.231901Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:47.231941Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:47.231976Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:47.232429Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:47.247070Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.247144Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.264256Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:21:47.265039Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:47.317625Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:21:47.320005Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:47.334441Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:21:47.334491Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:21:47.334553Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:47.342474Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:47.359502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.378128Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:47.378261Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:47.386632Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:47.530548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:47.563116Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:47.812824Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:47.950807Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:47.950889Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:21:48.627251Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... echecking } 2026-01-10T12:22:14.807353Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4198:3759], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:14.845505Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4197:3758] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:15.052477Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4219:3772]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:15.052688Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:15.052772Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4221:3774] 2026-01-10T12:22:15.052823Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4221:3774] 2026-01-10T12:22:15.053162Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4222:3775] 2026-01-10T12:22:15.053286Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4222:3775], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:15.053340Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:15.053427Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4221:3774], server id = [2:4222:3775], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:15.053487Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:15.053547Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4219:3772], StatRequests.size() = 1 2026-01-10T12:22:15.053693Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: 
[72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 ... waiting for TEvKqpScan (done) 2026-01-10T12:23:11.566708Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2803:3017];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-10T12:23:11.578061Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [2:4310:3856]. Got EvDeliveryProblem, TabletId: 72075186224037899, NotDelivered: 0, Starting 2026-01-10T12:23:11.578149Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [2:4310:3856]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037899, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-10T12:23:11.682468Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:4317:3861];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 2026-01-10T12:23:11.895926Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:23:11.896029Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR 2026-01-10T12:23:12.689516Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4087:3697], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:12.689802Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4087:3697], Start read next stream part 2026-01-10T12:23:12.690500Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpvmddnkd452ctk248rc4", SessionId: ydb://session/3?node_id=2&id=NjJkNjc3Y2YtYjkxOGFkNzAtNDlmZjFkYWEtNWYxMjlkNjU=, Slow query, duration: 58.270466s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:12.691228Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:12.696115Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:12.696315Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], Start read next stream part 2026-01-10T12:23:12.696444Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-10T12:23:12.701462Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4087:3697], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:12.701542Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4087:3697], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWNjZWI0YzktOThhNTc1M2MtMmM0MTc4ZmYtYmQ4M2QwZjA=, TxId: 2026-01-10T12:23:12.801621Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4474:3995]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:12.801979Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:23:12.802049Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4474:3995], StatRequests.size() = 1 2026-01-10T12:23:58.180768Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxrmhr51n5ej28bjtx75rx", SessionId: ydb://session/3?node_id=2&id=YTM2NTM5YmYtYTMyMzc0N2MtOGNjN2NmOGQtMjJjZTQ2NGI=, Slow query, duration: 45.479394s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:58.183195Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:58.183339Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], Start read next stream part 2026-01-10T12:23:58.183797Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-10T12:23:58.184346Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4565:4080], ActorId: [2:4567:4081], Starting query actor #1 
[2:4568:4082] 2026-01-10T12:23:58.184424Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4567:4081], ActorId: [2:4568:4082], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:23:58.188920Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:58.188997Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4074:2462], ActorId: [2:4449:3974], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzZjN2I3OWEtZThiM2Y1M2QtYmIwY2RhOGEtNmRlZTA3MTM=, TxId: 2026-01-10T12:23:58.190204Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4567:4081], ActorId: [2:4568:4082], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTlhODM4YjMtMmM4Yzg3YzEtMTg0NTIyYjYtYjc2MTI4MmE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:23:58.281959Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4584:4096]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:58.282311Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:58.282361Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4584:4096], StatRequests.size() = 1 2026-01-10T12:23:58.522348Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4567:4081], ActorId: [2:4568:4082], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTlhODM4YjMtMmM4Yzg3YzEtMTg0NTIyYjYtYjc2MTI4MmE=, TxId: 2026-01-10T12:23:58.522461Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4567:4081], ActorId: [2:4568:4082], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTlhODM4YjMtMmM4Yzg3YzEtMTg0NTIyYjYtYjc2MTI4MmE=, TxId: 2026-01-10T12:23:58.522942Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4565:4080], ActorId: [2:4567:4081], Got response [2:4568:4082] SUCCESS 2026-01-10T12:23:58.523461Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:58.541784Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:23:58.541890Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3153:3434] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots >> TxUsage::WriteToTopic_Demo_41_Query >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] Test command err: 2026-01-10T12:20:50.015768Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706836693436554:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.015830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.166048Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.524171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.577708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.577805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.729189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.736264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706832398469215:2081] 1768047649979917 != 1768047649979920 2026-01-10T12:20:50.748835Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.829301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.054236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.105180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001a3d/r3tmp/yandexCILSAm.tmp 2026-01-10T12:20:51.105202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001a3d/r3tmp/yandexCILSAm.tmp 2026-01-10T12:20:51.106686Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001a3d/r3tmp/yandexCILSAm.tmp 2026-01-10T12:20:51.106798Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.348210Z INFO: TTestServer started on Port 29394 GrpcPort 23262 PQClient connected to localhost:23262 2026-01-10T12:20:51.635096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.659512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.804356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.071056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:52.078762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.076727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853873306739:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.076900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.078102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853873306752:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.078153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853873306753:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.079106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.113608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.140312Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706853873306756:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.214078Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706853873306820:2646] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.718460Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706853873306835:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.732168Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjViN2IxZmItZjIyZGExNDgtNGRmY2FiNzMtNWU0YTdmZTQ=, ActorId: [1:7593706853873306737:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxmd5g86bsyb7tbte1wgr8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.734695Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.773377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.809718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.911995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.012282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706836693436554:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.012362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706858168274408:2825] === CheckClustersList. Ok 2026-01-10T12:21:01.452342Z :WriteToTopic_Demo_41_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.484837Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.505117Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706883938078415:2937] connected; active server actors: 1 2026-01-10T12:21:01.505352Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.522820Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.523324Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:01.528969Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.529876Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:01.530105Z node 1 :PER ... ][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.746975Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:59.746995Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.747008Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:59.756331Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:59.756386Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.756401Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:59.756422Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.756436Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:59.847109Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:59.847159Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.847174Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:59.847196Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.847210Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 
2026-01-10T12:23:59.856842Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:59.856895Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.856909Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:59.856930Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:59.856943Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:59.999877Z :INFO: [/Root] [/Root] [7baefcce-4f9fc738-8fdec71-f6417400] Closing read session. Close timeout: 0.000000s 2026-01-10T12:23:59.999957Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:319:320 2026-01-10T12:24:00.000004Z :INFO: [/Root] [/Root] [7baefcce-4f9fc738-8fdec71-f6417400] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2038 BytesRead: 20480000 MessagesRead: 320 BytesReadCompressed: 20480000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:24:00.000150Z :NOTICE: [/Root] [/Root] [7baefcce-4f9fc738-8fdec71-f6417400] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:24:00.000204Z :DEBUG: [/Root] [/Root] [7baefcce-4f9fc738-8fdec71-f6417400] [] Abort session to cluster 2026-01-10T12:24:00.000750Z :DEBUG: [/Root] 0x00007CFBAF8B2990 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_245146444128162903_v1 Close 2026-01-10T12:24:00.001174Z :DEBUG: [/Root] 0x00007CFBAF8B2990 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_245146444128162903_v1 Close 2026-01-10T12:24:00.001329Z :NOTICE: [/Root] [/Root] [7baefcce-4f9fc738-8fdec71-f6417400] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:24:01.444527Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7593707639302573171:2529]: session cookie 2 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:01.444582Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7593707639302573171:2529]: session cookie 2 consumer test-consumer session test-consumer_13_1_245146444128162903_v1grpc read failed 2026-01-10T12:24:01.444625Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7593707639302573171:2529]: session cookie 2 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 grpc closed 2026-01-10T12:24:01.444676Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7593707639302573171:2529]: session cookie 2 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 proxy is DEAD 2026-01-10T12:24:01.445359Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.445381Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.445394Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.445412Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.445425Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.445490Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.445501Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.445510Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.445523Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.445533Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.445536Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:01.445547Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 grpc read failed 2026-01-10T12:24:01.445566Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 grpc closed 2026-01-10T12:24:01.445588Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_245146444128162903_v1 is DEAD 2026-01-10T12:24:01.445882Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:24:01.447190Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: 
read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7593707639302573163:2524] disconnected. 2026-01-10T12:24:01.447227Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7593707639302573163:2524] disconnected; active server actors: 1 2026-01-10T12:24:01.447248Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7593707639302573163:2524] client test-consumer disconnected session test-consumer_13_1_245146444128162903_v1 2026-01-10T12:24:01.447341Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_245146444128162903_v1 2026-01-10T12:24:01.447388Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7593707639302573166:2527] destroyed 2026-01-10T12:24:01.448199Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_245146444128162903_v1 2026-01-10T12:24:01.545436Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.545482Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.545497Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.545539Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.545555Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.545907Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.545934Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.545945Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.545962Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.545974Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.645766Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.645814Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.645831Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.645853Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.645869Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.646232Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 
2026-01-10T12:24:01.646261Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.646273Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.646291Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.646304Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> BasicStatistics::TwoServerlessTwoSharedDbs |93.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-10T12:20:53.368112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706849686070178:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:53.368198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:53.456535Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:53.750912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:53.751037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:53.882846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:53.909357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:53.910617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:54.212895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:54.213435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001996/r3tmp/yandexzNt8rR.tmp 2026-01-10T12:20:54.213446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001996/r3tmp/yandexzNt8rR.tmp 2026-01-10T12:20:54.213658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001996/r3tmp/yandexzNt8rR.tmp 2026-01-10T12:20:54.213753Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:20:54.280531Z INFO: TTestServer started on Port 14183 GrpcPort 16539 2026-01-10T12:20:54.372228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:16539 2026-01-10T12:20:54.531976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:54.541194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:54.553679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:54.618229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.764652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:57.009225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706866865940279:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.009307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706866865940289:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.012276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.016187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706866865940295:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.016288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:57.017439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:57.033402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706866865940294:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:57.106739Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706866865940360:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:57.388739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:57.414059Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706866865940368:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:57.416621Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTFlMjY2OGUtMzk5MWVhNzMtODlhNjk4ZGMtOTRlODllZGI=, ActorId: [1:7593706866865940263:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxmg1e7vfd74dcah7ypdx9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:57.418991Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:57.436982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:57.521588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593706866865940643:2819] 2026-01-10T12:20:58.362404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706849686070178:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:58.362507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-01-10T12:21:04.143143Z :WriteToTopic_Demo_12_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:04.161757Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:04.193949Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706896930711956:2938] connected; active server actors: 1 2026-01-10T12:21:04.194203Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:04.195996Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:04.196148Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:04.199159Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:04.200242Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:04.200493Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-10T12:21:04.200531Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037 ... unters: { Errors: 0 CurrentSessionLifetimeMs: 2006 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:23:55.697296Z :NOTICE: [/Root] [/Root] [34d07f33-aa3b8ce4-cdba5813-1e0b9dac] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:23:55.697354Z :DEBUG: [/Root] [/Root] [34d07f33-aa3b8ce4-cdba5813-1e0b9dac] [] Abort session to cluster 2026-01-10T12:23:55.697893Z :DEBUG: [/Root] 0x00007D32FFEE7D90 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_7496646548566858968_v1 Close 2026-01-10T12:23:55.698287Z :DEBUG: [/Root] 0x00007D32FFEE7D90 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_7496646548566858968_v1 Close 2026-01-10T12:23:55.698426Z :NOTICE: [/Root] [/Root] [34d07f33-aa3b8ce4-cdba5813-1e0b9dac] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:23:55.698853Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 grpc read done: success# 0, data# { } 2026-01-10T12:23:55.698885Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 grpc read failed 2026-01-10T12:23:55.698919Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 grpc closed 2026-01-10T12:23:55.698962Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 is DEAD 2026-01-10T12:23:55.700463Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [14:7593707624497503002:2526] disconnected. 2026-01-10T12:23:55.700500Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [14:7593707624497503002:2526] disconnected; active server actors: 1 2026-01-10T12:23:55.700523Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [14:7593707624497503002:2526] client test-consumer disconnected session test-consumer_14_1_7496646548566858968_v1 2026-01-10T12:23:55.700618Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_14_1_7496646548566858968_v1 2026-01-10T12:23:55.700652Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593707624497503005:2529] destroyed 2026-01-10T12:23:55.700715Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7593707624497503012:2531]: session cookie 2 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 grpc read done: success# 0, data# { } 2026-01-10T12:23:55.700735Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7593707624497503012:2531]: session cookie 2 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1grpc read failed 2026-01-10T12:23:55.700763Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7593707624497503012:2531]: session cookie 2 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 grpc closed 2026-01-10T12:23:55.700785Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7593707624497503012:2531]: session cookie 2 consumer test-consumer session test-consumer_14_1_7496646548566858968_v1 proxy is DEAD 2026-01-10T12:23:55.701339Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:399: Direct read cache: DestroyPartitionSession, sessionId: test-consumer_14_1_7496646548566858968_v1, proxy: [14:7593707624497503012:2531] 2026-01-10T12:23:55.701372Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_14_1_7496646548566858968_v1 2026-01-10T12:23:55.704304Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.704339Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.704356Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.704377Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.704394Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.712333Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-01-10T12:23:55.712401Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0] PartitionId [0] Generation [2] Write session will now close 2026-01-10T12:23:55.712600Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:23:55.712834Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-10T12:23:55.713060Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-10T12:23:55.713118Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-10T12:23:55.716478Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0 grpc read done: success: 0 data: 2026-01-10T12:23:55.716514Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0 grpc read failed 2026-01-10T12:23:55.716554Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0 grpc closed 2026-01-10T12:23:55.716578Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|1d6036bf-5f081f3b-6609b38e-b4cc5b61_0 is DEAD 2026-01-10T12:23:55.717471Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:23:55.721071Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593707615907568345:2504] destroyed 2026-01-10T12:23:55.721133Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:23:55.721169Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.721191Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.721208Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.721230Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.721247Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.767820Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.767865Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.767881Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.767901Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.767918Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.804663Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.804698Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.804709Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.804723Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.804736Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.869663Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.869705Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.869722Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.869745Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.869762Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.905457Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.905499Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.905516Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:23:55.905537Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.905557Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:23:55.971367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:23:55.971403Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.971418Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:23:55.971440Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:23:55.971477Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |93.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 15848265803732742629 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-10T12:23:38.326408Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.328288Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.330352Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.337136Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.337275Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.502256Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.511138Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.663641Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.743536Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.841549Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.849318Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.924948Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.925325Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.935637Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:38.950381Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.031689Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.049648Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.227616Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.248896Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.265673Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.266225Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.385981Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.392821Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.524326Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.532344Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.540551Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.551742Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.559929Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.569360Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.581222Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.595284Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.595911Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.704063Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.712567Z 1 
00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.729516Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.738421Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.765274Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.960573Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.992506Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:39.992891Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.011566Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.019139Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.227963Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.239959Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.293015Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.428655Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.461374Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.461773Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.470721Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.487215Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.681101Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.692390Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.703265Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.711503Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.721103Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.728879Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.827151Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.839224Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:40.912903Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.093881Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.121394Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.132733Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.223276Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.237585Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.292779Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.304259Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.314331Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.333899Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.441093Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.469611Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.479990Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:707] 2026-01-10T12:23:41.721625Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [ ... 
k read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-10T12:23:52.657567Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.661368Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.664613Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.671653Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.673298Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.705740Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.743319Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.785638Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.803545Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:52.851929Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.218799Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.304379Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.305990Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.417713Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.659279Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.674305Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.790678Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.945091Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:53.975617Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.013962Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:756] 2026-01-10T12:23:54.015234Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.028494Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.202669Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.214638Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.333031Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.345586Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.477043Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.490078Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.590201Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.704880Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.706024Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.825494Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:54.840350Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.258382Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.272095Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.302735Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.422844Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.438599Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.439007Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.530215Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.716664Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.741898Z 8 00h20m58.912560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:55.797737Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.001852Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.043985Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.071641Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.072778Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.100463Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.123520Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.494080Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.517556Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.551918Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.578267Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.708007Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.709228Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.912558Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:56.931650Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.052983Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.067058Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.213163Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.409188Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.422922Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.565095Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:756] 2026-01-10T12:23:57.583993Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.601816Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.646508Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.787716Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:57.822853Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:58.023484Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:58.139874Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] 2026-01-10T12:23:58.140864Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:756] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-10T12:20:50.021864Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706835600562314:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.021931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.183673Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.621608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.621719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.711154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.745824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.748173Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706831305594859:2081] 1768047650004665 != 1768047650004668 2026-01-10T12:20:50.754693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.940176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.024333Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.105989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001a3c/r3tmp/yandexnmtYRK.tmp 2026-01-10T12:20:51.106020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001a3c/r3tmp/yandexnmtYRK.tmp 2026-01-10T12:20:51.106150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001a3c/r3tmp/yandexnmtYRK.tmp 2026-01-10T12:20:51.106217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.345411Z INFO: TTestServer started on Port 2037 GrpcPort 26176 PQClient connected to localhost:26176 2026-01-10T12:20:51.638263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.649087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:51.661165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.819657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.972544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:51.985270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.182823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852780432379:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.182942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.183349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852780432391:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.183405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852780432392:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.183531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.189759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.215851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706852780432395:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.305943Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706852780432459:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.730943Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706852780432471:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.733572Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZThhYmI2MTUtOTEwMjFmODctNDJkYTZkYjYtZDY1YzhlMTc=, ActorId: [1:7593706852780432376:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxmd8hbsdxwt0hx2mv7krp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.735634Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.775272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.809768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.912416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.023093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706835600562314:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.023165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706857075400049:2825] === CheckClustersList. Ok 2026-01-10T12:21:01.602839Z :WriteToTopic_Demo_19_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.629487Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.674496Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.675361Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:01.675592Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-10T12:21:01.675640Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:21:01.675682Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-10T12:21:01.692828Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7593706882845204042:2929], now have 1 active actors on pipe 2026-01-10T12:21:01.693360Z ... .cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.820690Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.841909Z :INFO: [/Root] [/Root] [bf25db64-9c76757a-af7eaab4-ce3af430] Closing read session. Close timeout: 0.000000s 2026-01-10T12:24:01.841982Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2026-01-10T12:24:01.842045Z :INFO: [/Root] [/Root] [bf25db64-9c76757a-af7eaab4-ce3af430] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2005 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:24:01.842166Z :NOTICE: [/Root] [/Root] [bf25db64-9c76757a-af7eaab4-ce3af430] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:24:01.842236Z :DEBUG: [/Root] [/Root] [bf25db64-9c76757a-af7eaab4-ce3af430] [] Abort session to cluster 2026-01-10T12:24:01.842799Z :DEBUG: [/Root] 0x00007DC38FF97190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_108311787649300505_v1 Close 2026-01-10T12:24:01.843187Z :DEBUG: [/Root] 0x00007DC38FF97190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_108311787649300505_v1 Close 2026-01-10T12:24:01.843321Z :NOTICE: [/Root] [/Root] [bf25db64-9c76757a-af7eaab4-ce3af430] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:24:01.845211Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:01.845247Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 grpc read failed 2026-01-10T12:24:01.845325Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 closed 2026-01-10T12:24:01.845381Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 is DEAD 2026-01-10T12:24:01.845768Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7593707646451779267:2523]: session cookie 2 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:01.845791Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7593707646451779267:2523]: session cookie 2 consumer test-consumer session test-consumer_13_1_108311787649300505_v1grpc read failed 2026-01-10T12:24:01.845820Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7593707646451779267:2523]: session cookie 2 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 grpc closed 2026-01-10T12:24:01.845844Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7593707646451779267:2523]: session cookie 2 consumer test-consumer session test-consumer_13_1_108311787649300505_v1 proxy is DEAD 2026-01-10T12:24:01.846564Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7593707646451779259:2518] disconnected. 2026-01-10T12:24:01.846605Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7593707646451779259:2518] disconnected; active server actors: 1 2026-01-10T12:24:01.846642Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7593707646451779259:2518] client test-consumer disconnected session test-consumer_13_1_108311787649300505_v1 2026-01-10T12:24:01.846751Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_108311787649300505_v1 2026-01-10T12:24:01.846793Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7593707646451779262:2521] destroyed 2026-01-10T12:24:01.846836Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_108311787649300505_v1 2026-01-10T12:24:01.846996Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session: close. 
Timeout 0.000000s 2026-01-10T12:24:01.847250Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session will now close 2026-01-10T12:24:01.847315Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-10T12:24:01.849411Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-10T12:24:01.849472Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:24:01.849528Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:24:01.849566Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2026-01-10T12:24:01.849641Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-10T12:24:01.849213Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0 grpc read done: success: 0 data: 2026-01-10T12:24:01.849254Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0 grpc read failed 2026-01-10T12:24:01.849290Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0 grpc closed 2026-01-10T12:24:01.849305Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|a242eb53-c1c05793-e1eb018c-ce161ca8_0 is DEAD 2026-01-10T12:24:01.849756Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:01.849884Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7593707637861844605:2497] destroyed 2026-01-10T12:24:01.849925Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:24:01.849954Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.849974Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.849990Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.850010Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.850022Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.904549Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.904601Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.904622Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.904645Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.904663Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:01.921269Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:01.921309Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.921327Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:01.921354Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:01.921372Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:02.004686Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:02.004722Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.004738Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:02.004756Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.004772Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:02.024211Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:02.024245Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.024261Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:24:02.024279Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.024294Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:02.104924Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:02.104961Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.104974Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:02.104989Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:02.105001Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |93.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:43.309521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:43.382555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:43.387864Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:43.388180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:43.388244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:43.655109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:43.731480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:43.731577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:43.765230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:43.840271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:44.424663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:44.425640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:44.425670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:44.425689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:44.425800Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:44.486223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:44.991496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:47.367379Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:47.367473Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:47.380675Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:47.404434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:47.404510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.440842Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:47.442254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.594825Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:47.594901Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:47.595976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.596519Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597078Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597786Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597855Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.597962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.598057Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:47.612848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:47.794521Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:47.815536Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:47.815610Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:47.844586Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:47.845447Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:47.845613Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:47.845655Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:47.845688Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:47.845725Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:47.845759Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:47.845792Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:47.846386Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:47.855327Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.855407Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:47.871440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:21:47.872318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:21:47.915795Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:21:47.918902Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:47.932921Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Describe result: PathErrorUnknown 2026-01-10T12:21:47.932967Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Creating table 2026-01-10T12:21:47.933027Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:47.937666Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:47.940730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:47.946289Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:47.946383Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:47.958998Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:48.103585Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:48.139745Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:48.412730Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:48.540183Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:48.540246Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Column diff is empty, finishing 2026-01-10T12:21:49.228106Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... default not found or you don't have access permissions } 2026-01-10T12:22:14.782875Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4148:3727], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:14.782981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:22:14.788016Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4166:3733], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:14.790279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:22:14.832041Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4149:3728], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:22:15.098201Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4233:3774], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:22:15.121149Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4232:3773] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:22:15.290989Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4254:3787]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:15.291216Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:15.291287Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4256:3789] 2026-01-10T12:22:15.291352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4256:3789] 2026-01-10T12:22:15.291618Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4257:3790] 2026-01-10T12:22:15.291704Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4257:3790], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:15.291749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:15.291872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4256:3789], server id = [2:4257:3790], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:15.291956Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:15.292017Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4254:3787], StatRequests.size() = 1 2026-01-10T12:22:15.292174Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:23:15.328225Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxpvyr38zppjc19yx6devr", SessionId: ydb://session/3?node_id=2&id=ZDJhYTI1ODgtODAyMzI0NmUtNjA1MDUzMmUtMzNlZjU3Njk=, Slow query, duration: 60.577869s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:15.329254Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4121:3713], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:15.329443Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4121:3713], Start read next stream part 2026-01-10T12:23:15.329922Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:15.331979Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-10T12:23:15.332596Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:15.332685Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], Start read next stream part 2026-01-10T12:23:15.335080Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4121:3713], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:15.335138Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4121:3713], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzI0MTRkN2QtNzhlYzRjOTUtMTA4MzY0Ny02NTA4OGFmNA==, TxId: 2026-01-10T12:23:15.426164Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4393:3912]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:15.426377Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:23:15.426416Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4393:3912], StatRequests.size() = 1 2026-01-10T12:24:02.552110Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxrq4432et1095dzmw44q2", SessionId: ydb://session/3?node_id=2&id=OTk3Y2E2MzMtMWY1NTg1MDItYTdmY2MxYzQtMWRhMWVhYzM=, Slow query, duration: 47.215632s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:24:02.554427Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:24:02.554566Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], Start read next stream part 2026-01-10T12:24:02.555024Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-10T12:24:02.555614Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4481:3995], ActorId: [2:4483:3996], Starting query actor #1 
[2:4484:3997] 2026-01-10T12:24:02.555691Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4483:3996], ActorId: [2:4484:3997], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:24:02.559431Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:24:02.559504Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4111:2460], ActorId: [2:4367:3890], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDgwZDE5ZDMtZDcxMWViMzYtMTYxYzQxYjQtY2ZkOTc2NTY=, TxId: 2026-01-10T12:24:02.560549Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4483:3996], ActorId: [2:4484:3997], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWMyN2U1YzQtZTgyZGVmMTYtOTZiODgyM2MtZjdhNzRiYTg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:24:02.654827Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4500:4011]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:02.655131Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:24:02.655182Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4500:4011], StatRequests.size() = 1 2026-01-10T12:24:02.853104Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4483:3996], ActorId: [2:4484:3997], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWMyN2U1YzQtZTgyZGVmMTYtOTZiODgyM2MtZjdhNzRiYTg=, TxId: 2026-01-10T12:24:02.853215Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4483:3996], ActorId: [2:4484:3997], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWMyN2U1YzQtZTgyZGVmMTYtOTZiODgyM2MtZjdhNzRiYTg=, TxId: 2026-01-10T12:24:02.853667Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4481:3995], ActorId: [2:4483:3996], Got response [2:4484:3997] SUCCESS 2026-01-10T12:24:02.854077Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:24:02.906542Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:24:02.906639Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3182:3446] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:49.006245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:49.006358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:49.006401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:49.006437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:49.006498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:49.006540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:49.006602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:49.006669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:49.007499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:49.008897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:49.107017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:49.107083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:49.119957Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:49.120070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:49.122918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:49.133519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:49.133834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:49.137173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:49.138939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:49.145908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:49.146836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:49.153467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:49.153563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:49.153671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:49.153720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:49.153767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:49.154012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:49.298161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.299997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:23:49.300057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.300121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.300177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.300237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.300280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:24:06.547399Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:06.547455Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:24:06.547510Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:06.547556Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:24:06.547589Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:24:06.547728Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:24:06.547783Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:24:06.547815Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-10T12:24:06.547852Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-10T12:24:06.549080Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2210], Recipient [10:127:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 10 } 2026-01-10T12:24:06.549127Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-10T12:24:06.549192Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:06.549268Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:06.549303Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:24:06.549361Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:24:06.549421Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:06.549521Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:24:06.550379Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2210], Recipient [10:127:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 } 2026-01-10T12:24:06.550423Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-10T12:24:06.550483Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:06.550558Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:06.550587Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:24:06.550616Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-10T12:24:06.550651Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:24:06.550744Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:24:06.550788Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:24:06.551097Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [10:127:2151], Recipient [10:127:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:24:06.551165Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:24:06.551253Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:24:06.551330Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-10T12:24:06.551445Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:24:06.554117Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:24:06.554746Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:24:06.554776Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at 
tablet# 72057594046678944 2026-01-10T12:24:06.555998Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:24:06.556037Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:24:06.556197Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:24:06.556519Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:24:06.556595Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:24:06.556997Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [10:849:2813], Recipient [10:127:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:24:06.557073Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:24:06.557123Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:24:06.557289Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [10:798:2762], Recipient [10:127:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-10T12:24:06.557325Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-10T12:24:06.557408Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:24:06.557522Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:24:06.557571Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:847:2811] 2026-01-10T12:24:06.557798Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [10:849:2813], Recipient [10:127:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:24:06.557838Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:24:06.557878Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-10T12:24:06.558279Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [10:850:2814], Recipient [10:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:24:06.558348Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:24:06.558471Z node 10 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:24:06.558685Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 228us result status StatusPathDoesNotExist 2026-01-10T12:24:06.558853Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TReplicationTests::CopyReplicatedTable [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] Test command err: 2026-01-10T12:20:49.996361Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706831795670095:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.003746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.055479Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.320158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.504384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.504539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.568189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7593706831795670045:2081] 1768047649968977 != 1768047649968980 2026-01-10T12:20:50.592503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.596447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.629303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:51.015029Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.104742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00199a/r3tmp/yandexF5sa2D.tmp 2026-01-10T12:20:51.104764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00199a/r3tmp/yandexF5sa2D.tmp 2026-01-10T12:20:51.106185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00199a/r3tmp/yandexF5sa2D.tmp 2026-01-10T12:20:51.106274Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.139830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.344008Z INFO: TTestServer started on Port 1293 GrpcPort 27536 PQClient connected to localhost:27536 2026-01-10T12:20:51.641931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.775935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.997539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:52.016610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.135897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853270507567:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.136043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.140229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853270507580:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.140343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853270507581:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.140526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.145906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.169241Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706853270507584:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.254247Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706853270507648:2647] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.718561Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706853270507656:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.730354Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDBiN2NiMy1mNGFiZjBjZS05NWI2MDlhLWQ5NGM4ZWNi, ActorId: [1:7593706853270507548:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxmd5ges9rvkccynxqd4k8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.733912Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.772704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.808958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.907478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:54.996210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706831795670095:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.005714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706857565475245:2827] === CheckClustersList. Ok 2026-01-10T12:21:01.591653Z :Sinks_Oltp_WriteToTopic_1_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.625337Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.661140Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706883335279251:2938] connected; active server actors: 1 2026-01-10T12:21:01.661364Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.669186Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.669356Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:01.678651Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.679484Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, T ... 26-01-10T12:24:03.688956Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [14:7593707646922379696:2545] disconnected. 2026-01-10T12:24:03.688981Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [14:7593707646922379696:2545] disconnected; active server actors: 1 2026-01-10T12:24:03.689002Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [14:7593707646922379696:2545] client test-consumer disconnected session test-consumer_14_1_1807811725396312626_v1 2026-01-10T12:24:03.692632Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-01-10T12:24:03.692704Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:24:03.692755Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:24:03.693226Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:24:03.693269Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:24:03.693805Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0 grpc read done: success: 0 data: 2026-01-10T12:24:03.693833Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0 grpc read failed 2026-01-10T12:24:03.693867Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0 grpc closed 2026-01-10T12:24:03.693883Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|c58800d2-2a7480e8-8ec35c9f-f93180e1_0 is DEAD 2026-01-10T12:24:03.694687Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:03.694728Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:03.697917Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7593707642627412295:2525] destroyed 2026-01-10T12:24:03.697964Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7593707642627412298:2525] destroyed 2026-01-10T12:24:03.698005Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:24:03.698036Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.698057Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.698073Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.698094Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.698110Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.700282Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-01-10T12:24:03.700321Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:24:03.700360Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:24:03.700716Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:24:03.700750Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:24:03.702849Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0 grpc read done: success: 0 data: 2026-01-10T12:24:03.702876Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0 grpc read failed 2026-01-10T12:24:03.702902Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0 grpc closed 2026-01-10T12:24:03.702913Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|f9a44ad9-73bac98-68a5092d-a1be9b46_0 is DEAD 2026-01-10T12:24:03.703885Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:03.703922Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:03.704174Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593707642627412250:2517] destroyed 2026-01-10T12:24:03.704207Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593707642627412253:2517] destroyed 2026-01-10T12:24:03.704238Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:24:03.704266Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.704286Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.704301Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.704319Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.704347Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.744422Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.744465Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.744481Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.744504Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.744520Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.763456Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.763492Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.763509Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.763532Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.763547Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.773951Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.773986Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.774002Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.774025Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.774040Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.844774Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.844822Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.844838Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:24:03.844871Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.844891Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.864603Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.864635Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.864650Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.864672Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.864688Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:03.882940Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:03.882978Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.882991Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:03.883011Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:03.883024Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |93.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:49.006239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:49.006340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:49.006391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:49.006430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:49.006492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:49.006548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:49.006645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:49.006713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:49.007570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:49.008931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:49.107016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:49.107084Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:49.122334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:49.122429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:49.122911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:49.133583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:49.133830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:49.137170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:49.138940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:49.145748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:49.146841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:49.153473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:49.153560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:49.153648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:49.153701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:49.153743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:49.154008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-10T12:23:49.309389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.310992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:49.311478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 806 RawX2: 42949675732 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.507348Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-10T12:24:07.507522Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 806 RawX2: 42949675732 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.507615Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:24:07.507824Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 806 RawX2: 42949675732 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.507931Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:07.508022Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-10T12:24:07.510841Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.511230Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.523332Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 42949675652 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.523380Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:24:07.523500Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 42949675652 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.523543Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:24:07.523610Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 707 RawX2: 42949675652 } Origin: 72075186233409546 State: 
2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:24:07.523673Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:07.523719Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.523760Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:24:07.523797Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:24:07.523820Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:24:07.525559Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.526085Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.526168Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-10T12:24:07.526246Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-10T12:24:07.526309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-10T12:24:07.526416Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-10T12:24:07.526475Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-10T12:24:07.528342Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:07.528400Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:24:07.528555Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:24:07.528597Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:07.528645Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:24:07.528686Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:07.528735Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:24:07.528821Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [10:733:2710] message: TxId: 102 2026-01-10T12:24:07.528880Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:07.528927Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:24:07.528965Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:24:07.529105Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:24:07.529146Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:24:07.530987Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:24:07.531055Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:837:2795] TestWaitNotification: OK eventTxId 102 2026-01-10T12:24:07.531734Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:24:07.532037Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 331us result status StatusSuccess 2026-01-10T12:24:07.532499Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 
ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest >> BasicStatistics::NotFullStatisticsColumnshard >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table [GOOD] |93.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot >> TSchemeShardUserAttrsTest::SetAttrs >> TSchemeShardUserAttrsTest::SpecialAttributes >> HttpRequest::Status [GOOD] >> TMonitoringTests::InvalidActorId |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TExportToS3Tests::CompletedExportEndTime [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> HttpRequest::Analyze [GOOD] >> 
TMonitoringTests::InvalidActorId [GOOD] >> TExportToS3Tests::DisableAutoDropping >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:13.734449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:13.734532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:13.734605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:13.734634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:13.734656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:13.734702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:13.735652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:13.736913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:13.836413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:13.836480Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:13.854283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:13.854393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:13.856876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:13.873309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:13.873609Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:13.874345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:13.875562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:13.883078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.884178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:13.889996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:13.890222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:13.890366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:13.890655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.066376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.067992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068322Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.068594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-10T12:24:15.020670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.020759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.020792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:24:15.020829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-10T12:24:15.020872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:24:15.020945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:24:15.026808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-10T12:24:15.027002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-10T12:24:15.027883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:15.028045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:15.028146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038, at schemeshard: 72057594046678944 2026-01-10T12:24:15.028312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-10T12:24:15.028490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:24:15.028559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:24:15.029176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:24:15.031347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:24:15.032996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:15.033068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:15.033233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:24:15.033333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:15.033367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:24:15.033428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:24:15.033817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:24:15.033886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:24:15.034012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:24:15.034060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:15.034113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:24:15.034143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:15.034179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:24:15.034257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:24:15.034291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:24:15.034325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:24:15.034411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:24:15.034448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:24:15.034479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 
72057594046678944, LocalPathId: 1], 8 2026-01-10T12:24:15.034524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-10T12:24:15.035426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.035521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.035561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:24:15.035605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:24:15.035649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:15.036668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.036768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:24:15.036802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:24:15.036833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:24:15.036869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-10T12:24:15.036955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:24:15.041410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:24:15.041712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-01-10T12:24:15.044813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: 
"__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:15.045127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:24:15.045270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2026-01-10T12:24:15.047860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:15.048121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:13.734449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:13.734540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:13.734617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:13.734654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:13.734686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:13.734768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:13.735701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:13.736903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:13.836426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:13.836492Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:13.854277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:13.854390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:13.857014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:13.869719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:13.870165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:13.873691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:13.875544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:13.883073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.884200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:13.889994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:13.890228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:13.890360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:13.890682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.053709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:24:14.055640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.055844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.055929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.056874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
meshard: 72057594046678944, cookie: 281474976710672 2026-01-10T12:24:14.949140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-10T12:24:14.949253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-10T12:24:14.949312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-10T12:24:14.956617Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-10T12:24:14.969316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:14.969535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.969707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:24:14.969755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:24:14.969953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:24:14.970029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:14.972526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:14.972688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:24:14.972903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.972964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:24:14.973003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:24:14.973036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-10T12:24:14.975012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.975061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:24:14.975092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:24:14.977015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.977060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.977106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:24:14.977142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:24:14.977259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:24:14.978909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:24:14.979038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:24:14.979348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:14.979464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:14.979514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:24:14.979820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:24:14.979878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:24:14.980042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2026-01-10T12:24:14.980151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:14.982181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:14.982233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:14.982427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:14.982510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:24:14.982801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.982851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:24:14.982951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:24:14.982980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:24:14.983013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:24:14.983039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:24:14.983064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:24:14.983092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:24:14.983128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:24:14.983164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:24:14.983233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:14.983260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:24:14.983300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:24:14.983817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:24:14.983915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:24:14.983957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:24:14.984003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:24:14.984060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:24:14.984172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:24:14.987475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:24:14.987983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:13.734472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:13.734563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:13.734667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:13.734703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:13.734733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:13.734809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:13.734881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:13.735757Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:13.736906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:13.836985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:13.837040Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:13.854674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:13.854742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:13.856850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:13.879892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:13.880178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:13.880935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:13.881263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:13.883931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.884133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:13.890201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:13.890471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:13.890539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:13.890617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:13.890928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:14.081374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.082987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:14.083462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:24:15.066925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:24:15.067011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-10T12:24:15.071973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:15.072207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2026-01-10T12:24:15.072453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:24:15.072505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:24:15.072567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-10T12:24:15.072708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:24:15.074844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-10T12:24:15.075037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-10T12:24:15.075453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:15.075573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:15.075626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-10T12:24:15.075854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#103:0 progress is 1/1 2026-01-10T12:24:15.075913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:24:15.075981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:24:15.076022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:24:15.076116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:15.076250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-10T12:24:15.076321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:15.076369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:24:15.076415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:24:15.076454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:24:15.076538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:24:15.076587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-10T12:24:15.076626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-10T12:24:15.079203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:15.079268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:15.079515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:15.079568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:24:15.080266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:24:15.080371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:24:15.080418Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:24:15.080463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:24:15.080526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:15.080633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-10T12:24:15.082678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:24:15.082991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:24:15.083044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:24:15.083501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:24:15.083602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:24:15.083646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:752:2742] TestWaitNotification: OK eventTxId 103 2026-01-10T12:24:15.084215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:24:15.084433Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2026-01-10T12:24:15.085027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> HttpRequest::AnalyzeServerless [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:00.822213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:00.944835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:00.953741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:00.954060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:00.954251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:01.388377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:01.481491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:01.481621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:01.544366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:01.655058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:02.469569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:02.470774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:02.470838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:02.470892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:02.471232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:02.531600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:03.146693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:06.708400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:06.716619Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:06.720265Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:06.759951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:06.760096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.806351Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:06.808403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.002053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:07.002168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:07.003679Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.004317Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.004987Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.005713Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.005996Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.006131Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.006282Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.006464Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.006589Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.029818Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.337960Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:07.376149Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:07.376245Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:07.417699Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:07.418586Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:07.418834Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:07.418904Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:07.418956Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:07.419012Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:07.419072Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:07.419135Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:07.419707Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:07.431510Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.431612Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.461729Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:24:07.462023Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:24:07.534797Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:24:07.537302Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:07.549724Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:24:07.549784Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:24:07.549879Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:07.554938Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:07.559063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:07.566660Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:07.566797Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:07.585938Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:07.805475Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:07.833221Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:08.206634Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:08.313923Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:08.314010Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:24:08.958308Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ;this=137164462225184;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3006;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=131:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.335836Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2897:3075];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=137164462223840;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3006;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.358153Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;self_id=[2:2903:3080];ev=NActors::IEventHandle;tablet_id=72075186224037902;tx_id=281474976715659;this=137164512782880;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3007;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.358736Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2917:3086];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=137164512784000;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3007;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.359260Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2918:3087];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=137164512785120;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3007;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.383392Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2925:3092];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=137164512788928;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.383985Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2926:3093];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=137164512790048;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.403433Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2930:3096];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=137164512793408;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.403956Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2934:3098];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=137164512794528;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1906:2453];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.545414Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.545552Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.545603Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.579804Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.579893Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.579927Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.589657Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.589749Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.589785Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.597623Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.597729Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.597767Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.606970Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.607074Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.607110Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.615024Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.615138Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.615180Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.624639Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.624736Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.624776Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.635416Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.635504Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.635542Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.645165Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.645295Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.645338Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.653347Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.653443Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.653481Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000014s 2026-01-10T12:24:12.774374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3713:3672] 2026-01-10T12:24:12.802200Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION Answer: 'No analyze operation' |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:01.062428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:01.172675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:01.181499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:01.181910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:01.181975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:01.620778Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:01.743514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:01.743659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:01.784327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:01.873891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:02.657448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:02.658599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:02.658655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:02.658704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:02.659393Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:02.728638Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:03.349733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:06.817287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:06.823802Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:06.827742Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:06.871851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:06.872001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.915178Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:06.918193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.135870Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.148853Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.149844Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.150773Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.150912Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.151172Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.151355Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.151475Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.151620Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.292332Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:07.359015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:07.359145Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:07.373418Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.576258Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:07.627096Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:07.627208Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:07.662682Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:07.664536Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:07.664854Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:07.664928Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:07.664994Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:07.665068Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:07.665146Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:07.665225Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:07.665913Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:07.792975Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.793091Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.825043Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:24:07.825638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:24:08.083362Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:24:08.083885Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:08.086600Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:24:08.086668Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:24:08.086751Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:08.091183Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:08.095149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:08.108393Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:08.108548Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:08.134078Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:08.443535Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:08.706115Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:08.837584Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:08.837678Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:24:09.504398Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... =0;src=[2:1944:2446];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.835400Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2949:3086];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976710659;this=136211455770848;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.835886Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2950:3087];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976710659;this=136211455771968;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.836531Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;self_id=[2:2927:3073];ev=NActors::IEventHandle;tablet_id=72075186224037902;tx_id=281474976710659;this=136211455773088;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.869331Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2953:3090];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976710659;this=136211455779136;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.869979Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2954:3091];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976710659;this=136211455780256;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.870532Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2962:3094];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976710659;this=136211455781376;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:10.871052Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2964:3096];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976710659;this=136211455782496;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1944:2446];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.007190Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.007354Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.007413Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.036737Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.036807Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.036829Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.041606Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.041694Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.041723Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.049115Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.049198Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.049232Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.055146Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.055255Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.055305Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.062408Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.062506Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.062538Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.069543Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.069651Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.069688Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.076732Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.076823Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.076885Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.083980Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.084069Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.084128Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.091524Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.091621Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-10T12:24:11.091660Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000019s 2026-01-10T12:24:13.434263Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3800:3735] 2026-01-10T12:24:13.460799Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3769:3507] , Record { OperationId: "\000\000\000\000\0206*\273\303pI\266\304]~\007" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Database: "" } 2026-01-10T12:24:13.460884Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `6*pI]~', DatabaseName: `', Types: 2026-01-10T12:24:13.460937Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `6*pI]~', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: Answer: 'Analyze sent. OperationId: 000000041p5axw6w29pv25tzg7' |94.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} >> BsControllerConfig::MoveGroups [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-10T12:20:50.041329Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706836219302451:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.041387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.105745Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.508730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.508836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.566252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.575770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.613289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706831924334944:2081] 1768047649982411 != 1768047649982414 2026-01-10T12:20:50.643728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.872963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.012087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.107188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0019b1/r3tmp/yandex1O3vED.tmp 2026-01-10T12:20:51.107221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0019b1/r3tmp/yandex1O3vED.tmp 2026-01-10T12:20:51.107378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0019b1/r3tmp/yandex1O3vED.tmp 2026-01-10T12:20:51.107475Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.356185Z INFO: TTestServer started on Port 14672 GrpcPort 27486 PQClient connected to localhost:27486 2026-01-10T12:20:51.715955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.742516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.808608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:52.032880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:52.046489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.117069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853399172456:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.117204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.117586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853399172476:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.117632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706853399172477:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.117755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.122322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.147208Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706853399172480:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.251331Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706853399172544:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.762938Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706853399172552:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.765812Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWY2NTYwNC0yY2FmYjM1Mi1kZTU5ZWRjMy1mMmVkYmM0Yw==, ActorId: [1:7593706853399172445:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxmd5y54s3dgh2yzra34kg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.768244Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.775284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.822349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.949570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.024362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706836219302451:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.024430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706857694140132:2824] === CheckClustersList. Ok 2026-01-10T12:21:01.354562Z :WriteToTopic_Demo_21_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.416420Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.471597Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706883463944141:2936] connected; active server actors: 1 2026-01-10T12:21:01.473276Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.475833Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.478188Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:01.484821Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.495954Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:01.497341Z n ... .cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.071009Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.088216Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.088260Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.088275Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:07.088295Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.088311Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.102201Z :INFO: [/Root] [/Root] [7a34b7d2-76773460-e7cb735d-96fd0ec2] Closing read session. Close timeout: 0.000000s 2026-01-10T12:24:07.102273Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:1:2 2026-01-10T12:24:07.102333Z :INFO: [/Root] [/Root] [7a34b7d2-76773460-e7cb735d-96fd0ec2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2072 BytesRead: 14000000 MessagesRead: 2 BytesReadCompressed: 14000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:24:07.102444Z :NOTICE: [/Root] [/Root] [7a34b7d2-76773460-e7cb735d-96fd0ec2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:24:07.102497Z :DEBUG: [/Root] [/Root] [7a34b7d2-76773460-e7cb735d-96fd0ec2] [] Abort session to cluster 2026-01-10T12:24:07.103015Z :DEBUG: [/Root] 0x00007D9EEA642190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_357719781008582284_v1 Close 2026-01-10T12:24:07.103764Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:07.103808Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 grpc read failed 2026-01-10T12:24:07.103843Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 grpc closed 2026-01-10T12:24:07.103894Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 is DEAD 2026-01-10T12:24:07.104917Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7593707672668842959:2530]: session cookie 2 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 grpc read done: success# 0, data# { } 2026-01-10T12:24:07.104954Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7593707672668842959:2530]: session cookie 2 consumer test-consumer session test-consumer_13_1_357719781008582284_v1grpc read failed 2026-01-10T12:24:07.104985Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7593707672668842959:2530]: session cookie 2 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 grpc closed 2026-01-10T12:24:07.105007Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7593707672668842959:2530]: session cookie 2 consumer test-consumer session test-consumer_13_1_357719781008582284_v1 proxy is DEAD 2026-01-10T12:24:07.105501Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7593707672668842950:2525] disconnected. 2026-01-10T12:24:07.105543Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7593707672668842950:2525] disconnected; active server actors: 1 2026-01-10T12:24:07.105568Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7593707672668842950:2525] client test-consumer disconnected session test-consumer_13_1_357719781008582284_v1 2026-01-10T12:24:07.106359Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_357719781008582284_v1 2026-01-10T12:24:07.106407Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7593707672668842953:2528] destroyed 2026-01-10T12:24:07.106448Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_357719781008582284_v1 2026-01-10T12:24:07.108956Z :DEBUG: [/Root] 0x00007D9EEA642190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_357719781008582284_v1 Close 2026-01-10T12:24:07.109144Z :NOTICE: [/Root] [/Root] [7a34b7d2-76773460-e7cb735d-96fd0ec2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:24:07.129827Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-01-10T12:24:07.129892Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session will now close 2026-01-10T12:24:07.129956Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-10T12:24:07.131063Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-10T12:24:07.131259Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:24:07.131351Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:24:07.131390Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2026-01-10T12:24:07.131472Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-10T12:24:07.136322Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0 grpc read done: success: 0 data: 2026-01-10T12:24:07.136360Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0 grpc read failed 2026-01-10T12:24:07.136414Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0 grpc closed 2026-01-10T12:24:07.136434Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|9068d1be-e1720346-40364702-3e770de6_0 is DEAD 2026-01-10T12:24:07.137408Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:24:07.138303Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7593707664078908291:2502] destroyed 2026-01-10T12:24:07.138356Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:24:07.138385Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.138406Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.138423Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:07.138441Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.138457Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.174849Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.174892Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.174907Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:07.174926Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.174943Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.192194Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.192235Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.192252Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:07.192273Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.192295Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.274823Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.274870Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.274886Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:07.274906Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.274924Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:07.293353Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:07.293379Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.293387Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-10T12:24:07.293397Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:07.293409Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist >> TExportToS3Tests::DisableAutoDropping [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table >> TExportToS3Tests::DecimalOutOfRange >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:23:58.974774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:23:59.093031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:23:59.102101Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:23:59.102596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:23:59.102702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:23:59.608253Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:59.732641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:59.732800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:59.767544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:59.843849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:00.895889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:00.897610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:00.897670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:00.898174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:00.898442Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:00.973271Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:01.559159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:05.067074Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:05.075540Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:05.078413Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:05.178945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:05.179050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.209577Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:05.211850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.430436Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:05.430571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.432415Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.433059Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.433570Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.434465Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.434883Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.435091Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.435199Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.435449Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.435601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.451671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.666269Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:05.718365Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:05.718471Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:05.754927Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:05.756521Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:05.756774Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:05.756831Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:05.756887Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:05.756937Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:05.757003Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:05.757061Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:05.757750Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:05.779745Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.779857Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.805623Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:24:05.806810Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:24:05.875016Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:24:05.878710Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:24:05.900703Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:24:05.900776Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:24:05.900857Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:24:05.917234Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:05.951433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:05.989707Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:05.989834Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:05.999123Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:06.230158Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:24:06.287890Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:06.617187Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:06.762954Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:06.763047Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:24:07.500860Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... rc=[2:2890:3012];cookie=101:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.053496Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[2:3576:3440];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976710661;this=137201382826272;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=131:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.089543Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[2:3591:3451];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976710661;this=137201382815296;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=121:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.090181Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[2:3594:3454];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976710661;this=137201382812608;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=111:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.090718Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:3599:3457];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976710661;this=137201382809024;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=91:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.091211Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037912;self_id=[2:3612:3465];ev=NActors::IEventHandle;tablet_id=72075186224037912;tx_id=281474976710661;this=137201382811488;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=141:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.112358Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;self_id=[2:3618:3469];ev=NActors::IEventHandle;tablet_id=72075186224037913;tx_id=281474976710661;this=137201382806560;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4512;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=151:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.112975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;self_id=[2:3619:3470];ev=NActors::IEventHandle;tablet_id=72075186224037914;tx_id=281474976710661;this=137201382805664;method=TTxController::StartProposeOnExecute;tx_info=281474976710661:TX_KIND_SCHEMA;min=4512;max=18446744073709551615;plan=0;src=[2:2890:3012];cookie=161:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.278446Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.278579Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.278633Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.310653Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.310751Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.310790Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.320659Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.320764Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.320806Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.330084Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.330181Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.330218Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.338336Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.338438Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.338476Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.348104Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.348202Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.348238Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.354854Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.354930Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.354967Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.362203Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.362314Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.362357Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.369921Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.370005Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.370031Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.377087Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.377198Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-10T12:24:11.377238Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; waiting actualization: 0/0.000023s 2026-01-10T12:24:13.817357Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4399:4042] 2026-01-10T12:24:13.853573Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4371:3710] , Record { OperationId: "\000\000\000\000\025\323\013\245QK\275\324\001\212^\263" Tables { PathId { OwnerId: 72057594046644480 LocalId: 38 } } Database: "" } 2026-01-10T12:24:13.853681Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: ` QK^', DatabaseName: `', Types: 2026-01-10T12:24:13.853735Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: ` QK^', PathId: [OwnerId: 72057594046644480, LocalPathId: 38], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000005ek1ejn2jxxtg0rmqnk' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2965:2117] 2026-01-10T12:23:36.955279Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.956206Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.956501Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.958018Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.958458Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.958634Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.958651Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.958860Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.965477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.965604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.965738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.965833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.965892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.965931Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-10T12:23:36.977407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.977541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.011662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.011789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.011871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.011924Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.012037Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.012125Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.012161Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.012195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.022926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.023059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.033689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.033813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:37.034741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:37.034775Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:37.034913Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:37.034949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:37.045624Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2026-01-10T12:23:37.046719Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-10T12:23:37.046759Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-10T12:23:37.046774Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-10T12:23:37.046790Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-10T12:23:37.046807Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-10T12:23:37.046822Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-10T12:23:37.046835Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-10T12:23:37.046861Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-10T12:23:37.046898Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-10T12:23:37.046919Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-10T12:23:37.046951Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-10T12:23:37.046965Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-10T12:23:37.046980Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-10T12:23:37.046992Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-10T12:23:37.047005Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-10T12:23:37.047030Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-10T12:23:37.047043Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-10T12:23:37.047055Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-10T12:23:37.047067Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-10T12:23:37.047088Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-10T12:23:37.047105Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 8:1000 Path# /dev/disk1 2026-01-10T12:24:07.350938Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2026-01-10T12:24:07.351002Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2026-01-10T12:24:07.351060Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2026-01-10T12:24:07.351107Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2026-01-10T12:24:07.351157Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2026-01-10T12:24:07.351204Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2026-01-10T12:24:07.351260Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2026-01-10T12:24:07.351329Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2026-01-10T12:24:07.351385Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2026-01-10T12:24:07.351442Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2026-01-10T12:24:07.351484Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2026-01-10T12:24:07.351539Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2026-01-10T12:24:07.351600Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1001 
Path# /dev/disk2 2026-01-10T12:24:07.351660Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2026-01-10T12:24:07.351708Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2026-01-10T12:24:07.351765Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2026-01-10T12:24:07.355225Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2026-01-10T12:24:07.355324Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2026-01-10T12:24:07.355389Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2026-01-10T12:24:07.355463Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2026-01-10T12:24:07.355504Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2026-01-10T12:24:07.355548Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2026-01-10T12:24:07.355581Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2026-01-10T12:24:07.355632Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1 2026-01-10T12:24:07.355680Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2026-01-10T12:24:07.355724Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2026-01-10T12:24:07.355766Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2026-01-10T12:24:07.355809Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2026-01-10T12:24:07.355864Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2026-01-10T12:24:07.355898Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2026-01-10T12:24:07.355952Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2026-01-10T12:24:07.355998Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2026-01-10T12:24:07.356054Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2026-01-10T12:24:07.356113Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2026-01-10T12:24:07.356142Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2026-01-10T12:24:07.356168Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2026-01-10T12:24:07.356212Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1001 
Path# /dev/disk2 2026-01-10T12:24:07.356256Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2026-01-10T12:24:07.356299Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2026-01-10T12:24:07.356342Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2026-01-10T12:24:07.356388Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2026-01-10T12:24:07.356433Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2026-01-10T12:24:07.356481Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2026-01-10T12:24:07.356513Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2026-01-10T12:24:07.356555Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2026-01-10T12:24:07.356603Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2026-01-10T12:24:07.356649Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2026-01-10T12:24:07.356693Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1 2026-01-10T12:24:07.356739Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2026-01-10T12:24:07.356784Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2026-01-10T12:24:07.356835Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2026-01-10T12:24:07.356882Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2026-01-10T12:24:07.356928Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2026-01-10T12:24:07.356974Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2026-01-10T12:24:07.357003Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2026-01-10T12:24:07.357058Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2026-01-10T12:24:07.357105Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2026-01-10T12:24:07.357134Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2026-01-10T12:24:07.357188Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2026-01-10T12:24:07.357236Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2026-01-10T12:24:07.357284Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1001 
Path# /dev/disk2 2026-01-10T12:24:07.357365Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2026-01-10T12:24:07.357396Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2026-01-10T12:24:07.357440Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2026-01-10T12:24:07.357485Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2026-01-10T12:24:07.357532Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2026-01-10T12:24:07.357572Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2026-01-10T12:24:07.357620Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2026-01-10T12:24:07.749403Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.404304s 2026-01-10T12:24:07.749620Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.404546s 2026-01-10T12:24:07.784484Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-10T12:24:07.882046Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2026-01-10T12:24:07.908859Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-10T12:24:07.989788Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2026-01-10T12:24:08.005893Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-10T12:24:08.100005Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2026-01-10T12:24:08.116702Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2026-01-10T12:23:46.108993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707594137605442:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.109067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.389182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.419718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.419942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.441245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.532811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.658357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.752026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.752061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.752069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.752172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.095761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.119265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:49.007845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508675:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.007959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.008236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508685:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.008284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.277805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.425111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508849:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508851:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.463580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-10T12:23:49.562435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508952:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.562597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.562988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508978:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508981:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508986:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508970:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508988:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508990:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508989:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508991:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.563906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022508997:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.565885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022509039:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.565887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022509042:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.565971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.566574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022509046:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.566654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.567249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707607022509061:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.567323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.567403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors ... d 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-10T12:24:09.618580Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YWQxMGZlYTktNTNkNjRjYzYtMmEyMjc3YmUtNDg4ZTk2NmM=, ActorId: [1:7593707607022508949:2380], ActorState: ExecuteState, LegacyTraceId: 01kekxtc3xdrwggyrs8efnp4s7, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-10T12:24:09.618925Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7593707692921941333:2355], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7593707607022508924:2355]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7593707692921941333:2355]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-10T12:24:09.618980Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593707692921941310:2355], SessionActorId: [1:7593707607022508924:2355], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7593707607022508924:2355]. 2026-01-10T12:24:09.619091Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OWI3OTEzNmItZDRkMTkzZS01NDdkODA5My05OTRmOTQ2ZQ==, ActorId: [1:7593707607022508924:2355], ActorState: ExecuteState, LegacyTraceId: 01kekxtc482qznt6qzgvbsvm2b, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7593707692921941312:2355] from: [1:7593707692921941310:2355] trace_id# 2026-01-10T12:24:09.619146Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593707692921941312:2355] TxId: 281474976723382. Ctx: { TraceId: 01kekxtc482qznt6qzgvbsvm2b, Database: , SessionId: ydb://session/3?node_id=1&id=OWI3OTEzNmItZDRkMTkzZS01NDdkODA5My05OTRmOTQ2ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-10T12:24:09.619324Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OWI3OTEzNmItZDRkMTkzZS01NDdkODA5My05OTRmOTQ2ZQ==, ActorId: [1:7593707607022508924:2355], ActorState: ExecuteState, LegacyTraceId: 01kekxtc482qznt6qzgvbsvm2b, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-10T12:24:09.619592Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7593707692921941343:2376], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7593707607022508945:2376]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7593707692921941343:2376]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-10T12:24:09.619649Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593707692921941311:2376], SessionActorId: [1:7593707607022508945:2376], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7593707607022508945:2376]. 2026-01-10T12:24:09.619750Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=Y2ZjNWM1NjQtMjI0N2E3ZDEtOTcxZTg0MDAtNTM3NDUxMTE=, ActorId: [1:7593707607022508945:2376], ActorState: ExecuteState, LegacyTraceId: 01kekxtc454w6j81mqj4srm4sk, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7593707692921941313:2376] from: [1:7593707692921941311:2376] trace_id# 2026-01-10T12:24:09.619847Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593707692921941313:2376] TxId: 281474976723383. Ctx: { TraceId: 01kekxtc454w6j81mqj4srm4sk, Database: , SessionId: ydb://session/3?node_id=1&id=Y2ZjNWM1NjQtMjI0N2E3ZDEtOTcxZTg0MDAtNTM3NDUxMTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-10T12:24:09.620009Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=Y2ZjNWM1NjQtMjI0N2E3ZDEtOTcxZTg0MDAtNTM3NDUxMTE=, ActorId: [1:7593707607022508945:2376], ActorState: ExecuteState, LegacyTraceId: 01kekxtc454w6j81mqj4srm4sk, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-10T12:24:09.624419Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7593707692921941344:2369], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7593707607022508938:2369]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7593707692921941344:2369]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-10T12:24:09.624477Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593707692921941315:2369], SessionActorId: [1:7593707607022508938:2369], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7593707607022508938:2369]. 2026-01-10T12:24:09.624579Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=YzJlNWU0MGUtYjY2YWI4YmYtNTI4M2Q5ZDktYzlmZTU2ODQ=, ActorId: [1:7593707607022508938:2369], ActorState: ExecuteState, LegacyTraceId: 01kekxtc482x1c6ayckw8zg93c, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7593707692921941317:2369] from: [1:7593707692921941315:2369] trace_id# 2026-01-10T12:24:09.624631Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593707692921941317:2369] TxId: 281474976723385. Ctx: { TraceId: 01kekxtc482x1c6ayckw8zg93c, Database: , SessionId: ydb://session/3?node_id=1&id=YzJlNWU0MGUtYjY2YWI4YmYtNTI4M2Q5ZDktYzlmZTU2ODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-10T12:24:09.624833Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzJlNWU0MGUtYjY2YWI4YmYtNTI4M2Q5ZDktYzlmZTU2ODQ=, ActorId: [1:7593707607022508938:2369], ActorState: ExecuteState, LegacyTraceId: 01kekxtc482x1c6ayckw8zg93c, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-10T12:24:09.626364Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:09.626578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-10T12:24:09.626646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:09.626667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:09.626707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-10T12:24:09.633511Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:09.634796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-10T12:24:09.641496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:09.651029Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:24:09.651479Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:24:09.651741Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:24:09.651894Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 Table has 2 shards |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> TExportToS3Tests::ShouldCheckQuotasExportsLimited |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads 
[GOOD] Test command err: 2026-01-10T12:23:46.110299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707593255497057:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.115790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.314948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.431090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.431193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.448817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.482214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.548816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.752125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.752156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.752162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.752245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.110030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.137568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:48.936799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707601845432994:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.936894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.937142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707601845433004:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.937206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.277808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.428477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400465:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.428574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.428777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400467:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.428825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.454367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-10T12:23:49.538651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400573:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.538835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400599:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400603:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400604:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400610:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400607:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400613:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400601:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400602:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400608:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.541254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400648:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.541254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400641:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.541303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.541628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606140400659:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.541692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.543667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710663:0, at schemeshard: 72057594046644480 2026-01-10T12:23:49.543826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710663:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:23:49.543850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710663:1, at schemeshard: 72057594046644480 2026-01-10T12:23:49.543961Z node 1 :FLAT_TX_SCHEME ... eshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:24:09.528496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-10T12:24:09.533075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:24:09.533325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:24:09.533381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 2 -> 3 2026-01-10T12:24:09.535078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-10T12:24:09.540994Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7593707692039859531:3608] 2026-01-10T12:24:09.543028Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7593707692039859536:3607] 2026-01-10T12:24:09.559621Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:24:09.559710Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:24:09.559869Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:24:09.566827Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:24:09.566922Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:24:09.567058Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-10T12:24:09.568765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2026-01-10T12:24:09.575193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge 
TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2026-01-10T12:24:09.575241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 3 -> 131 2026-01-10T12:24:09.580640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:09.604918Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-10T12:24:09.605027Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:09.605062Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:24:09.605085Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-10T12:24:09.605154Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-10T12:24:09.605231Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:09.605266Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:24:09.605271Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-10T12:24:09.605287Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:24:09.605511Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:24:09.608956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2026-01-10T12:24:09.609679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 131 -> 132 2026-01-10T12:24:09.611786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:24:09.612049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:24:09.612152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:09.612967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-01-10T12:24:09.613010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2026-01-10T12:24:09.613028Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-10T12:24:09.618658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-10T12:24:09.618660Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:09.618758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:09.618786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:09.618835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-10T12:24:09.621302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-10T12:24:09.623076Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:09.623383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:09.630524Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:24:09.631246Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:24:09.631389Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:24:09.631704Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:24:09.632737Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-10T12:24:09.632803Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-10T12:24:09.670742Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:09.670828Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:09.869837Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:09.869883Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:10.187304Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:10.187355Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:10.794440Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:10.794468Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:11.499970Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:11.815243Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:12.223178Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:12.477158Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:12.960507Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726897, task: 1, CA Id [1:7593707692039859828:2380]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-10T12:24:13.476570Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726898, task: 1, CA Id [1:7593707692039859848:2367]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 Table has 2 shards |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2026-01-10T12:23:46.911065Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707594046214767:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.912812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:47.114408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:47.157160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:47.157273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:47.186210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:47.218250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:47.324852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:47.324890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:47.324904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:47.325008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.371703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:47.543904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.918269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:49.741835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606931118002:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.741959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.742231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707606931118012:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.742287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.946658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:50.088163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085472:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.088258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.088539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085474:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.088578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.110873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-10T12:23:50.200274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085573:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.200419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.200951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085588:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085593:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085602:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085600:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085605:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085603:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085606:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.201439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085609:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.207602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085634:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.207667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085636:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.207703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085651:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.207743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.209824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085675:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.209906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707611226085677:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.209962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:50.211700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-10T12:23:50.211945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:S ... 8078:2369] TxId: 281474976723380. Ctx: { TraceId: 01kekxtd505wtz0py1zg958ngj, Database: , SessionId: ydb://session/3?node_id=1&id=YTdkYTRmMzctZTA3MTg2OTEtZWIzYzdjZTYtN2UzMjI4ZmQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-10T12:24:10.672961Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YTdkYTRmMzctZTA3MTg2OTEtZWIzYzdjZTYtN2UzMjI4ZmQ=, ActorId: [1:7593707611226085556:2369], ActorState: ExecuteState, LegacyTraceId: 01kekxtd505wtz0py1zg958ngj, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/Foo`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-10T12:24:10.673029Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-10T12:24:10.673050Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-10T12:24:10.673227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-10T12:24:10.673287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:10.673303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:10.673334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-10T12:24:10.673336Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:807: SelfId: [1:7593707697125518114:2376], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7593707611226085564:2376]Got WRONG SHARD STATE for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7593707697125518114:2376].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2026-01-10T12:24:10.673390Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7593707697125518080:2376], SessionActorId: [1:7593707611226085564:2376], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7593707611226085564:2376]. 2026-01-10T12:24:10.673556Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OGIwM2Y0NTQtNWU0ZjMyYjgtZTdmNWVmMzEtOGJkODg2NzQ=, ActorId: [1:7593707611226085564:2376], ActorState: ExecuteState, LegacyTraceId: 01kekxtd52cyspyvn46vt50jwy, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7593707697125518082:2376] from: [1:7593707697125518080:2376] trace_id# 2026-01-10T12:24:10.673630Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:7593707697125518082:2376] TxId: 281474976723381. Ctx: { TraceId: 01kekxtd52cyspyvn46vt50jwy, Database: , SessionId: ydb://session/3?node_id=1&id=OGIwM2Y0NTQtNWU0ZjMyYjgtZTdmNWVmMzEtOGJkODg2NzQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-10T12:24:10.673823Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGIwM2Y0NTQtNWU0ZjMyYjgtZTdmNWVmMzEtOGJkODg2NzQ=, ActorId: [1:7593707611226085564:2376], ActorState: ExecuteState, LegacyTraceId: 01kekxtd52cyspyvn46vt50jwy, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/Foo`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-10T12:24:10.675321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-10T12:24:10.771318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.771526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999760s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.773213Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707598341182415:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-10T12:24:10.773215Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707598341182415:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-10T12:24:10.820001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 48, next wakeup# 14.951296s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.824142Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-10T12:24:10.830137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 58, next wakeup# 14.941160s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.832983Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-10T12:24:10.838718Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:10.842609Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:10.848149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:10.853412Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:24:10.853721Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:24:10.856004Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:24:10.856289Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:24:10.920490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 3921, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 582.534750s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.920678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.850610s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.922511Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707598341182415:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-10T12:24:10.922585Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037890 tableId# 38 localTid# 1001, requested from [1:7593707598341182415:2207], partsCount# 2, memtableSize# 5680, memtableWaste# 6096, memtableRows# 37 2026-01-10T12:24:10.922599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 1, next wakeup# 14.848695s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:10.986423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 65 ms, with status# 0, next wakeup in# 582.468824s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:13.047413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, compactionInfo# {72057594046644480:2, SH# 2, Rows# 4282, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 580.407828s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:13.047706Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037889 tableId# 38 localTid# 1001, requested from [1:7593707598341182415:2207], partsCount# 2, memtableSize# 2528, memtableWaste# 2080, 
memtableRows# 18 2026-01-10T12:24:13.118738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 71 ms, with status# 0, next wakeup in# 580.336507s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 2 shards, running# 0 shards at schemeshard 72057594046644480 Table has 2 shards |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> TExportToS3Tests::DecimalOutOfRange [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:44.335172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:44.404541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:44.409640Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:44.409904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:44.409955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:44.674560Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:44.761840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:44.761999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:44.796003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:44.866952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:45.501491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:45.502422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:45.502466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:45.502497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:45.503078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:45.567828Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:46.066220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:48.538684Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:48.544501Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:48.547125Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:48.571455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:48.571584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.608537Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:48.610708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:48.753342Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.753880Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.754362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.754906Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.754975Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.755171Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.755285Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.755344Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.755486Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:48.918017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:48.940053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:48.940187Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:48.952772Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:49.091734Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:49.120048Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:49.120137Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:49.144825Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:49.146154Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:49.146310Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:49.146351Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:49.146389Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:49.146426Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:49.146470Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:49.146509Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:49.147336Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:49.252579Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:49.252699Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2208:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:49.265790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2240:2634] 2026-01-10T12:21:49.266055Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2240:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:49.407312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2413:2743] 2026-01-10T12:21:49.407693Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:49.409207Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:49.409246Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Creating table 2026-01-10T12:21:49.409308Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:49.411943Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2432:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:49.414381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:49.419710Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:49.419794Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:49.429331Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:49.588634Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:49.815794Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:49.945551Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:49.945642Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Column diff is empty, finishing 2026-01-10T12:21:50.570003Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ty: 1 } 2026-01-10T12:22:23.424876Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4306:3799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:23.425187Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:22:23.425276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4308:3801] 2026-01-10T12:22:23.425361Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4308:3801] 2026-01-10T12:22:23.425753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4309:3802] 2026-01-10T12:22:23.425900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4309:3802], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:22:23.425973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:22:23.426093Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4308:3801], server id = [2:4309:3802], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:23.426191Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:22:23.426268Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4306:3799], StatRequests.size() = 1 2026-01-10T12:22:23.426495Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:23:22.961971Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4171:3724], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:22.962224Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4171:3724], Start read next stream part 2026-01-10T12:23:22.962429Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxq3xg5vb3a4wj2fm9am5f", SessionId: ydb://session/3?node_id=2&id=MjlhMzQ0YjktYmQwYWNjMWMtYjFjMGNiMmItYzMyZjQ4YmM=, Slow query, duration: 60.060059s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:22.964104Z node 2 :STATISTICS DEBUG: 
query_actor.cpp:135: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:22.966896Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-10T12:23:22.967848Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:22.967979Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], Start read next stream part 2026-01-10T12:23:22.968210Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4171:3724], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:22.968267Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4171:3724], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjJhM2FlMzUtYTMyMTQyOGUtODU5ZTUzZmUtNGI2YmYyZmQ=, TxId: 2026-01-10T12:23:23.059717Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4445:3924]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:23.059974Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:23:23.060018Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4445:3924], StatRequests.size() = 1 2026-01-10T12:24:11.126292Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxryjqc3gr49cjtgrs5c8j", SessionId: ydb://session/3?node_id=2&id=NDMxMjJmNmEtMWQzMWRmOWQtZTNkOTNjZWEtYTQ3OTQ1MmI=, Slow query, duration: 48.153569s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:24:11.128711Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:24:11.128869Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], Start read next stream part 2026-01-10T12:24:11.129329Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33180, txId: 18446744073709551615] shutting down 2026-01-10T12:24:11.130027Z node 2 :STATISTICS DEBUG: query_actor.h:292: 
[TQueryRetryActor] OwnerId: [2:4533:4007], ActorId: [2:4536:4009], Starting query actor #1 [2:4539:4011] 2026-01-10T12:24:11.130115Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4536:4009], ActorId: [2:4539:4011], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:24:11.134323Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:24:11.134431Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4161:2472], ActorId: [2:4419:3902], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGNkY2EzMDgtOGY4ZWQ5ZDktODFjN2ZhMzUtN2Y0NDczYQ==, TxId: 2026-01-10T12:24:11.135059Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4536:4009], ActorId: [2:4539:4011], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTBkNDQ4YTctNDAzMWY1ZGYtZDBjMTYyNTQtMjg2ZTU5Yzk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:24:11.210173Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4552:4023]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:11.210502Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:24:11.210552Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4552:4023], StatRequests.size() = 1 2026-01-10T12:24:11.363108Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4536:4009], ActorId: [2:4539:4011], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTBkNDQ4YTctNDAzMWY1ZGYtZDBjMTYyNTQtMjg2ZTU5Yzk=, TxId: 2026-01-10T12:24:11.363207Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4536:4009], ActorId: [2:4539:4011], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTBkNDQ4YTctNDAzMWY1ZGYtZDBjMTYyNTQtMjg2ZTU5Yzk=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... 
waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-10T12:24:11.459897Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:24:11.460003Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:24:11.481873Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4308:3801], schemeshard count = 1 2026-01-10T12:24:11.786781Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:26: [72075186224037894] Delete long analyze operation, OperationId=operationId 2026-01-10T12:24:12.053567Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:24:12.053842Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-10T12:24:12.053966Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-10T12:24:12.065982Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:24:12.066075Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:12.066422Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:24:12.961479Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:12.961569Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:12.961625Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:12.961674Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:16.787714Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:44: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:3208:3452] 2026-01-10T12:24:16.787844Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> StreamCreator::WithResolvedTimestamps >> TExportToS3Tests::CorruptedDecimalValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2965:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2965:2117] 2026-01-10T12:23:36.694726Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:36.696032Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:36.696476Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:36.698787Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:36.699446Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:36.699802Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.699847Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:36.700201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:36.710242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:36.710414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:36.710584Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:36.710726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.710821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:36.710887Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-10T12:23:36.722942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:36.723114Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:36.765844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 
2026-01-10T12:23:36.766028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.766119Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:36.766216Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.766380Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:36.766467Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.766509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:36.766561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.777405Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:36.777548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.788438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:36.788604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:36.790102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:36.790154Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:36.790372Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:36.790432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:36.808102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host 
{ Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2026-01-10T12:23:36.809071Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-10T12:23:36.809158Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-10T12:23:36.809188Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-10T12:23:36.809214Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-10T12:23:36.809240Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-10T12:23:36.809267Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-10T12:23:36.809292Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-10T12:23:36.809346Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-10T12:23:36.809387Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-10T12:23:36.809415Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-10T12:23:36.809445Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-10T12:23:36.809472Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-10T12:23:36.809496Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-10T12:23:36.809521Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-10T12:23:36.809545Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-10T12:23:36.809584Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-10T12:23:36.809627Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-10T12:23:36.809663Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-10T12:23:36.809704Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-10T12:23:36.809735Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-10T12:23:36.809760Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-10T12:23:36.809785Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-10T12:23:36.809810Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-10T12:23:36.809850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-10T12:23:36.809879Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-10T12:23:36.809909Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-10T12:23:36.809934Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-10T12:23:36.809959Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-10T12:23:36.809992Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-10T12:23:36.810039Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2026-01-10T12:23:36.810067Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2026-01-10T12:23:36.810094Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2026-01-10T12:23:36.810120Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Cr ... 
R NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2026-01-10T12:24:08.314865Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2026-01-10T12:24:08.314894Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2026-01-10T12:24:08.314920Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2026-01-10T12:24:08.314949Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2026-01-10T12:24:08.314979Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2026-01-10T12:24:08.315008Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2026-01-10T12:24:08.315036Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2026-01-10T12:24:08.315067Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2026-01-10T12:24:08.315097Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2026-01-10T12:24:08.315134Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2026-01-10T12:24:08.315166Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2026-01-10T12:24:08.315194Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2026-01-10T12:24:08.315223Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2026-01-10T12:24:08.315252Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2026-01-10T12:24:08.315311Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2026-01-10T12:24:08.315339Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2026-01-10T12:24:08.315370Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2026-01-10T12:24:08.315398Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2026-01-10T12:24:08.315426Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2026-01-10T12:24:08.315460Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2026-01-10T12:24:08.684841Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.377879s 2026-01-10T12:24:08.685095Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.378168s 2026-01-10T12:24:08.734933Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2026-01-10T12:24:08.737418Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2026-01-10T12:24:08.737498Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2026-01-10T12:24:08.737540Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2026-01-10T12:24:08.737568Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2026-01-10T12:24:08.737621Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2026-01-10T12:24:08.737652Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2026-01-10T12:24:08.737680Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2026-01-10T12:24:08.737707Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2026-01-10T12:24:08.737733Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2026-01-10T12:24:08.737760Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2026-01-10T12:24:08.737811Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2026-01-10T12:24:08.737844Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2026-01-10T12:24:08.737892Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2026-01-10T12:24:08.737951Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2026-01-10T12:24:08.737985Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2026-01-10T12:24:08.738012Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2026-01-10T12:24:08.738039Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2026-01-10T12:24:08.738067Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2026-01-10T12:24:08.738093Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2026-01-10T12:24:08.738120Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2026-01-10T12:24:08.738148Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
217:1002 Path# /dev/disk3 2026-01-10T12:24:08.738175Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2026-01-10T12:24:08.738205Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2026-01-10T12:24:08.738234Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2026-01-10T12:24:08.738261Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2026-01-10T12:24:08.738315Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2026-01-10T12:24:08.738347Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2026-01-10T12:24:08.738374Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2026-01-10T12:24:08.738406Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2026-01-10T12:24:08.738436Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] Test command err: 2026-01-10T12:20:50.150261Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706835317108879:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.150314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.219425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:50.220056Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.634684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.634787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.738238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.753574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.788315Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706835317108747:2081] 1768047650136977 != 1768047650136980 2026-01-10T12:20:50.812876Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:51.002294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.108281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00199f/r3tmp/yandexXNZdIB.tmp 2026-01-10T12:20:51.108306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00199f/r3tmp/yandexXNZdIB.tmp 2026-01-10T12:20:51.108432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00199f/r3tmp/yandexXNZdIB.tmp 2026-01-10T12:20:51.108507Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.169439Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.344227Z INFO: TTestServer started on Port 7632 GrpcPort 26851 PQClient connected to localhost:26851 2026-01-10T12:20:51.634851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.644574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:51.654193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.732547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:51.980497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:20:51.988986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-10T12:20:54.116686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852496978948:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.116838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.119819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852496978985:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.119861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706852496978986:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.119979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.124539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.151651Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706852496978989:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.219841Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706852496979055:2649] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.729567Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706852496979063:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.732398Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTNjOTVjMmItODI0NjM4Zi1hYzk0NzEyOC1iYTlkMGU4Nw==, ActorId: [1:7593706852496978945:2328], ActorState: ExecuteState, LegacyTraceId: 01kekxmd5k71mw99jydaj9qg1t, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.734598Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.772676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.814417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.947890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.150475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706835317108879:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.150540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706856791946640:2829] === CheckClustersList. Ok 2026-01-10T12:21:01.431405Z :WriteToTopic_Demo_23_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.454553Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.477329Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706882561750646:2940] connected; active server actors: 1 2026-01-10T12:21:01.477567Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.498719Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.498928Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [720 ... er disconnected, pipe [13:7593707712178933320:2486] destroyed 2026-01-10T12:24:17.069845Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::DropOwner. 2026-01-10T12:24:17.069874Z node 13 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Writing. Can't process user action and tx events 2026-01-10T12:24:17.069887Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:24:17.069897Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:17.069908Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.069919Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:17.069929Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.069938Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:24:17.071590Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 100000 offset 80 count 1 size 1000243 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.071644Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:24:17.071687Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037894' partition 100000 offset 80 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.071697Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 WriteNewSizeFromSupportivePartitions# 0 2026-01-10T12:24:17.071731Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::ReplyWrite. 
Partition: {0, {13, 281474976710674}, 100000} 2026-01-10T12:24:17.071774Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976710674}, 100000}, SeqNo: 81, partNo: 0, Offset: 80 is stored on disk 2026-01-10T12:24:17.071795Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] TPartition::ReplyWrite. Partition: {0, {13, 281474976710674}, 100000} 2026-01-10T12:24:17.071812Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976710674}, 100000}, SeqNo: 81, partNo: 1, Offset: 80 is stored on disk 2026-01-10T12:24:17.073872Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:24:17.073893Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.073906Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:17.073923Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.073938Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Try persist 2026-01-10T12:24:17.073965Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:24:17.074033Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic_A' partition: 0 messageNo: 161 requestId: cookie: 81 2026-01-10T12:24:17.074223Z node 13 :PERSQUEUE DEBUG: partition.cpp:4395: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-10T12:24:17.074469Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-10T12:24:17.074494Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-10T12:24:17.075924Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 63 count 8 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.075941Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 71 count 3 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.075954Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 74 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.075966Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 75 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.075981Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 100000 offset 76 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.075992Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 77 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.076004Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 78 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.076015Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 79 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.076031Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 80 count 1 actorID [13:7593707712178933236:2468] 2026-01-10T12:24:17.076113Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-10T12:24:17.076138Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 63 partno 0 count 8 parts 8 suffix '0' size 8001771 2026-01-10T12:24:17.076164Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 71 partno 0 count 3 parts 3 suffix '124' size 3000671 2026-01-10T12:24:17.076184Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 74 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076203Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 75 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076231Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 76 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076252Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 77 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076280Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 78 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076298Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 79 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076315Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037894' partition 100000 offset 80 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-10T12:24:17.076317Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:24:17.076338Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:24:17.076361Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.076378Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:17.076394Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.076408Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] Try persist 2026-01-10T12:24:17.076431Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976710674}, 100000}][StateIdle] No data for blobs compaction 2026-01-10T12:24:17.079282Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72075186224037894] DeletePartition {0, {13, 281474976710674}, 100000} 2026-01-10T12:24:17.079375Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-10T12:24:17.113158Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-10T12:24:17.169250Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:17.169297Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.169314Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:17.169339Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.169355Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:24:17.169683Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:24:17.169700Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.169708Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:24:17.169725Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:24:17.169735Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |94.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> 
VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> TExportToS3Tests::SchemaMapping >> TExportToS3Tests::CorruptedDecimalValue [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount >> ExternalBlobsMultipleChannels::Simple |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction >> StreamCreator::Basic [GOOD] >> TExportToS3Tests::SchemaMapping [GOOD] >> StreamCreator::WithResolvedTimestamps [GOOD] >> KqpSysColV1::StreamInnerJoinTables >> KqpSystemView::PartitionStatsOrderByDesc >> KqpSystemView::PartitionStatsParametricRanges >> KqpSysColV0::SelectRowById >> KqpSystemView::PartitionStatsRange2 >> TExportToS3Tests::SchemaMappingEncryption >> KqpSysColV1::InnerJoinSelect >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSystemView::PartitionStatsRange3 >> KqpSystemView::QueryStatsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:41.149371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-10T12:23:41.149752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.243584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.243636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.257831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.257956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.258149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.265797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.265996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:41.266629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.266829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.268795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.268949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:41.273072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.273228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.273340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.274087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.408281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.411001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.411094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.411164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.411242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000041 2026-01-10T12:24:20.988964Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:20.989101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:20.989167Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000041, at schemeshard: 72057594046678944 2026-01-10T12:24:20.989291Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 128 -> 129 2026-01-10T12:24:20.989435Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:25930 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9EE52EDB-3655-4048-96CC-F9C931130892 amz-sdk-request: attempt=1 content-length: 119 content-md5: oX2vUQoa+8sc65ouCpRQqg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 119 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000041 2026-01-10T12:24:21.023939Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:21.023995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:24:21.024256Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:21.024295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 40 2026-01-10T12:24:21.024722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-10T12:24:21.024786Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:24:21.026263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-10T12:24:21.026386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-10T12:24:21.026423Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2026-01-10T12:24:21.026469Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-10T12:24:21.026521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-10T12:24:21.026645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /Backup1/metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:25930 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 942F609F-B08B-42B4-A284-4C10199FAD6B amz-sdk-request: attempt=1 content-length: 78 content-md5: Vj/U8z9C9WvNxxNxUoW5sg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json.sha256 / / 78 FAKE_COORDINATOR: Erasing txId 281474976710759 2026-01-10T12:24:21.028427Z node 7 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation 2026-01-10T12:24:21.040821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2026-01-10T12:24:21.063683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 850 RawX2: 30064773889 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-10T12:24:21.063764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2026-01-10T12:24:21.063909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 850 RawX2: 30064773889 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-10T12:24:21.064027Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 850 RawX2: 30064773889 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-10T12:24:21.064137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, 
operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:21.064192Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-10T12:24:21.064236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-10T12:24:21.064291Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 129 -> 240 2026-01-10T12:24:21.064485Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:21.066527Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-10T12:24:21.066842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-10T12:24:21.066889Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2026-01-10T12:24:21.067005Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-10T12:24:21.067053Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-10T12:24:21.067095Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-10T12:24:21.067143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-10T12:24:21.067179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2026-01-10T12:24:21.067247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:127:2152] message: TxId: 281474976710759 2026-01-10T12:24:21.067309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-10T12:24:21.067350Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:0 2026-01-10T12:24:21.067379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:0 2026-01-10T12:24:21.067486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-10T12:24:21.069461Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2026-01-10T12:24:21.069529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710759 2026-01-10T12:24:21.069756Z node 7 :EXPORT NOTICE: schemeshard_export__create.cpp:729: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2026-01-10T12:24:21.071603Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:24:21.071661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:879:2838] TestWaitNotification: OK eventTxId 102 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView >> KqpSystemView::NodesOrderByDesc >> KqpSysColV1::StreamSelectRowAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2026-01-10T12:24:19.141153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707735324408676:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:19.141249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:19.361032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:19.452124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:19.452240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:19.455875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:19.529366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:19.596963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:19.864294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:19.864377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-10T12:24:19.864383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:19.864447Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:20.147980Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:20.285151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:20.296601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:20.405516Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:24:20.405594Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:24:20.406215Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:24:21.872695Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047860404, tx_id: 281474976710658 } } } 2026-01-10T12:24:21.873860Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:24:21.875225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:21.875804Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:24:21.875826Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:24:21.898458Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle 
NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:24:21.898510Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:24:21.901588Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-10T12:24:22.010997Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7593707748209311658:2336] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:24:22.028618Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:24:22.028643Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-10T12:24:22.049544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:24:22.063787Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:24:22.063853Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2026-01-10T12:24:19.822454Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707736254643689:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:19.822505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:19.996375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:20.007876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:20.007997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:20.010148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:20.081468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:20.082763Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707736254643648:2081] 1768047859821155 != 1768047859821158 2026-01-10T12:24:20.211217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:20.300878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:20.300925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:20.300942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:20.301036Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:20.543998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:20.550719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:20.643095Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-10T12:24:20.643154Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-10T12:24:20.643758Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-10T12:24:20.828320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:22.091224Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1768047860642, tx_id: 281474976710658 } } } 2026-01-10T12:24:22.091604Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-10T12:24:22.093313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:22.094108Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-10T12:24:22.094126Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-10T12:24:22.115499Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-10T12:24:22.115530Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-10T12:24:22.116024Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-10T12:24:22.219337Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7593707749139546670:2336] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-10T12:24:22.223562Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:24:22.223591Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-10T12:24:22.233091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-10T12:24:22.244200Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-10T12:24:22.244235Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |94.0%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> KqpSystemView::PartitionStatsRange1 >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView >> KqpSysColV0::InnerJoinTables >> KqpSystemView::TopQueriesOrderByDesc >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:24.160026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:24.306224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:24.330636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:24.331137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.331243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:24.824855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.825042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.832415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047861550251 != 1768047861550254 2026-01-10T12:24:24.847634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:24.899988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:25.016837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:25.330586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:25.344029Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:25.463882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.818594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:24:26.050532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> 
TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:41.149351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.247083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.247136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.261970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.262066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.262242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.269322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.269488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:41.269954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.270114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.271669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.271806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:41.273083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.273199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.273231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.273284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.274101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.408282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:23:41.410809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.410912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... otify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-10T12:24:27.086052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-10T12:24:27.086214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:24:27.086632Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000041 2026-01-10T12:24:27.087155Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:27.087334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:27.087406Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000041, at schemeshard: 72057594046678944 2026-01-10T12:24:27.087576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-10T12:24:27.087699Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#281474976710758:0 progress is 1/1 2026-01-10T12:24:27.087767Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:24:27.087829Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-10T12:24:27.087882Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:24:27.087969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:24:27.088070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:24:27.088153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-10T12:24:27.088222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:24:27.088281Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-01-10T12:24:27.088332Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-10T12:24:27.088430Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:24:27.088493Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-10T12:24:27.088542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:24:27.088581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-10T12:24:27.090021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.091864Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:27.091916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:27.092136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-10T12:24:27.092336Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:27.092376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-10T12:24:27.092420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 40 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-10T12:24:27.093193Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.093272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.093304Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-10T12:24:27.093359Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:24:27.093409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-10T12:24:27.093816Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.093876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.093899Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-10T12:24:27.093921Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-10T12:24:27.093945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-10T12:24:27.094022Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-10T12:24:27.094067Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-10T12:24:27.094355Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in 
candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:24:27.094407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-10T12:24:27.094486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:24:27.097878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.099174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:24:27.099311Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-10T12:24:27.099416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-10T12:24:27.099487Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-10T12:24:27.099525Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2026-01-10T12:24:27.099564Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2026-01-10T12:24:27.100004Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:24:27.101859Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2026-01-10T12:24:27.102210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:24:27.102273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:24:27.102895Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:24:27.103026Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:24:27.103113Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:950:2908] TestWaitNotification: OK eventTxId 103 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] >> KqpSysColV0::SelectRowById [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSystemView::NodesSimple >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSysColV1::InnerJoinSelect [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:23:41.149351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:23:41.149479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:23:41.149575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:23:41.149616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:23:41.149650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:23:41.149730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:23:41.149811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:23:41.150796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:23:41.152798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:23:41.250107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:23:41.250169Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:41.266075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:23:41.266180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:23:41.266342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:23:41.274558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:23:41.274774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:23:41.275509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:23:41.275739Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:23:41.278012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.278209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:23:41.279412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:23:41.279474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:23:41.279543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:23:41.279629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:23:41.279696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:23:41.279988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:23:41.411346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: 
ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:23:41.412846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:24:28.871444Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.871534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.871568Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:24:28.871604Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-10T12:24:28.871642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:24:28.871742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-10T12:24:28.874381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-10T12:24:28.874437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-10T12:24:28.874478Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 
2026-01-10T12:24:28.881368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-10T12:24:28.881544Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:24:28.882000Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-01-10T12:24:28.882486Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:28.882623Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 30064773230 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:28.882695Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-10T12:24:28.882870Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-10T12:24:28.882970Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-10T12:24:28.883037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:24:28.883097Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-10T12:24:28.883147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:24:28.883230Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:24:28.883312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:24:28.883359Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-10T12:24:28.883420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-10T12:24:28.883490Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-10T12:24:28.883548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 
2026-01-10T12:24:28.883630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:24:28.883671Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-10T12:24:28.883750Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-10T12:24:28.883797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-10T12:24:28.885707Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.887637Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:28.887686Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:28.887866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:24:28.888000Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:28.888044Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-10T12:24:28.888113Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-10T12:24:28.888990Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.889090Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.889130Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:24:28.889202Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-10T12:24:28.889266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:24:28.890105Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.890190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.890221Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:24:28.890257Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-10T12:24:28.890290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:24:28.890377Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-10T12:24:28.890443Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:129:2154] 2026-01-10T12:24:28.893738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.893929Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:24:28.893982Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:24:28.894024Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:24:28.895550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:24:28.895615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:988:2940] TestWaitNotification: OK eventTxId 102 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 3212, MsgBus: 11817 2026-01-10T12:24:23.001554Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707753253328847:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.001655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.252203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.258428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.258551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.293187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.364579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.368872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707748958361515:2081] 1768047862998550 != 1768047862998553 2026-01-10T12:24:23.484933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.566907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.566943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.566957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.567028Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.012299Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.166580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.178893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.252426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.418752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.579149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.650962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.320689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766138232570:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.320791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.321175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766138232580:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.321219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.742310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.785752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.820958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.854619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.889472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.940581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.034781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.125624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.218615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770433200755:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.218711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.218947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770433200761:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770433200760:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.231114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.241959Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707770433200764:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.316710Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707770433200817:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.001771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707753253328847:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.001863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 12449, MsgBus: 16926 2026-01-10T12:24:23.012507Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707753225445856:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.012566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.360327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.360440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.407788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.446071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.446199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.568025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.568047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.568054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.568154Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:23.625102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.029832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.193579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.199529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.266333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.415671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.604882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.689214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.497548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766110349594:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.497659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.498050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766110349604:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.498119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.830253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.864647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.902604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.945187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.984394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.037479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.075551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.137325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.238990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770405317771:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.239082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.239159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770405317776:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.239399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770405317778:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.239435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.243005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.253693Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707770405317779:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.320729Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707770405317831:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.012755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707753225445856:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.012824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.187144Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047869176, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 25548, MsgBus: 65259 2026-01-10T12:24:23.017273Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707750280733433:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.018899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.267414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.267540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.350613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.360442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.370372Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707745985766012:2081] 1768047862998512 != 1768047862998515 2026-01-10T12:24:23.374312Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.570167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.570226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.570256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.570330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:23.576260Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.016962Z node 
1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.162505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.172337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.240361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.423333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.589761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.660379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.202636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763165637068:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.202827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.203404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763165637078:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.203478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.745113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.777731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.820028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.851270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.884229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.925401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.970464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.045728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.146130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767460605241:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767460605246:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767460605247:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.154017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.166672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707767460605250:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.268314Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707767460605303:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.006916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707750280733433:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.008540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 23381, MsgBus: 15293 2026-01-10T12:24:23.002152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707750953511671:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.002216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.310749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.310846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.329450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.351881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.370092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.566878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.566949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.566961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.567028Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:23.619576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.018730Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.163969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.171097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.240573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.443232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.610028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.681369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.170909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763838415393:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.170999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.171296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763838415402:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.171327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.741904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.778066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.812306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.851491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.887614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.924362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.968848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.036418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.159115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768133383573:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.159189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768133383578:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.159211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.159478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768133383581:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.159529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.163155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.177111Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707768133383580:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.271602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707768133383633:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.002034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707750953511671:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.002124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.207888Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047869202, txId: 281474976710673] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 31141, MsgBus: 14189 2026-01-10T12:24:23.003399Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707753629028393:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.003453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.232407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.260766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.260888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.337130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.362301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.464924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.568775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.568804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.568809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.568884Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.020215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.257720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.269055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.322776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.472031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.647134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.728135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.305088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766513932055:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.305197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.305984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766513932065:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.306072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.742313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.783022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.822164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.853763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.887558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.926772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.999403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.103829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.213456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770808900238:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.213543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.213815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770808900243:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.213865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770808900244:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.213901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.217769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.236945Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707770808900247:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:27.336172Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707770808900300:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.003759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707753629028393:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.003825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.464696Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047869476, txId: 281474976715673] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 28624, MsgBus: 19378 2026-01-10T12:24:23.001966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707752082538286:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.002059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.225756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.271035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.271173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.354531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.381855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.459382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.566984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.567016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.567037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.567147Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.012916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.186242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.197962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.263459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.435617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.603900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.671473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.325801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707764967442013:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.325909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.328201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707764967442023:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.328251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.787486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.828694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.862778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.906197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.947597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.992458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.052142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.139075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.218960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769262410195:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769262410200:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769262410201:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.219386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.223506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.236667Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707769262410204:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.308468Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707769262410257:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.001891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707752082538286:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.001957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 23964, MsgBus: 25644 2026-01-10T12:24:23.001753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707752610307100:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.001835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.241699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.295614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.295742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.339554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.383097Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.466187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.567926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.567961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.567998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.569007Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.014541Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.212510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.234330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.306606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.515424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.681756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.749372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.309602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707765495210817:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.309713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.310041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707765495210827:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.310115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.742348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.784370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.818972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.854853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.888659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.940422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.978577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.034492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.146110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769790178991:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769790178997:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769790178996:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.155084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.170156Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707769790179000:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:27.247066Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707769790179051:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.001883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707752610307100:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.001968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.466122Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047869447, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 31964, MsgBus: 19786 2026-01-10T12:24:23.464025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707750957275122:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.464071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.721543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.721680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.757350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.758634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.769355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.904808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.904840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.904846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.904917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.030690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.336047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.398600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.485214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.593706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.757076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.852552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.817995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763842178847:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.818158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.818715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763842178857:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.818807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.164103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.199685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.227910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.258904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.291728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.361257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.400387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.457578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.541399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768137147026:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.541497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.541769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768137147031:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.541837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707768137147033:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.541964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.546698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.559784Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707768137147035:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:27.645006Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707768137147086:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.464319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707750957275122:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.464416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.353801Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047869392, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 26189, MsgBus: 11059 2026-01-10T12:24:23.003351Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707753115786832:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.003418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.284066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.318808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.318960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.375714Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707748820819428:2081] 1768047862998507 != 1768047862998510 2026-01-10T12:24:23.390221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.391294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.469488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.567889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.567913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.567918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.567999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:24:24.024885Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.200856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.261253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.419718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.566789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.657021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.307983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766000690499:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.308112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.308618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707766000690509:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.308684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.742079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.780285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.814242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.851710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.888740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.939409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.992474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.098370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.177105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770295658676:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.177155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.177333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770295658682:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.177363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.177431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770295658681:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.181204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.192218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707770295658685:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:27.262224Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707770295658738:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.003536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707753115786832:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.003638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsRange1 [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 27256, MsgBus: 10666 2026-01-10T12:24:23.382525Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707750503698017:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.382630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.623946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.649160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.649285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.692695Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707750503697974:2081] 1768047863362802 != 1768047863362805 2026-01-10T12:24:23.702982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.708893Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.836279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.836317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.836324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.836442Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:23.842406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:24:24.281372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.287140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.345446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.420935Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.549923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.708972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.796069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.708905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763388601756:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.709042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.709394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707763388601766:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.709474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.051690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.086872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.126883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.165007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.207410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.244927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.277154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.316746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.401930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767683569930:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.401999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.402373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767683569936:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.402449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707767683569935:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.402525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.405989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.417297Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707767683569939:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.480990Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707767683569990:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.382635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707750503698017:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.382711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.025180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 9896, MsgBus: 23796 2026-01-10T12:24:23.001584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707752711029543:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.001638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.268287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.306673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.306854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.371552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.372773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.375637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707748416062208:2081] 1768047862998465 != 1768047862998468 2026-01-10T12:24:23.493104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.567203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.567248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:24:23.567255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.567342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.020247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.158692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.177605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:24.244852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.411585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.567424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:24.670992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.277135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707765595933263:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.277236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.277557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707765595933273:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.277599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:26.742425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.781441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.817869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.848807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.884254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.928973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.009042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.066164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.146656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769890901445:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.146985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769890901450:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.147016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769890901451:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.147106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.155866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:27.167998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707769890901454:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:27.244937Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707769890901505:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:28.003620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707752711029543:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.003673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 7480, MsgBus: 28591 2026-01-10T12:24:25.506884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707758529932835:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:25.507883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:25.728786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:25.728910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:25.731446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:25.809989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:25.817280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:25.925334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:25.925361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:25.925370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:25.925450Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:26.007678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:26.342314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:26.405390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.512186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:26.553433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.692669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.763663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.680242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707771414836540:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.680388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.680680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707771414836550:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.680732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.996654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.025243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.053296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.082181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.111437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.145599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.196694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.238936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.307248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775709804717:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.307327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.307384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775709804722:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.307505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775709804724:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.307557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.311278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:29.322346Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707775709804726:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:29.415959Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707775709804777:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:30.508237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707758529932835:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:30.508350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:31.156787Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047871149, txId: 281474976710673] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 >> KqpSysColV1::StreamSelectRange >> KqpSystemView::PartitionStatsFollower >> KqpSysColV1::SelectRowById >> KqpSysColV0::UpdateAndDelete >> KqpSysColV1::StreamSelectRowById >> KqpSysColV1::SelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 16975, MsgBus: 14557 2026-01-10T12:24:25.759177Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707759009629160:2127];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:25.759572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:25.984272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:26.003191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:26.003311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:26.037272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:26.100866Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707759009629071:2081] 1768047865751271 != 1768047865751274 2026-01-10T12:24:26.139912Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:26.228991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:26.229035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:26.229047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:26.229141Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-01-10T12:24:26.252198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:26.666366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:26.678326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:26.713608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.773256Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:26.904684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.095606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.183670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.173353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776189500139:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.173485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.173847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776189500149:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.173909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.513193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.540887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.565538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.588773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.614266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.647399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.679790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.721924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.791165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776189501016:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.791257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.791293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776189501021:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.791519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776189501023:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.791563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.796277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:29.811698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707776189501024:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:29.910618Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707776189501078:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:30.756219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707759009629160:2127];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:30.756296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:31.307399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSystemView::NodesRange1 >> KqpSystemView::PartitionStatsSimple >> KqpSysColV1::StreamInnerJoinSelect |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 62187, MsgBus: 24721 2026-01-10T12:24:26.139349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707763354175673:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:26.139420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:26.428070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:26.428194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:26.470128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:26.489668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:26.514573Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707763354175646:2081] 1768047866137753 != 1768047866137756 2026-01-10T12:24:26.515778Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:26.594751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:26.594773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:26.594777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:26.594836Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:26.679111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:27.050501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:27.127856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.168214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:27.318453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.468827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.536970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.434267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776239079413:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.434378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.434729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707776239079423:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.434797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.764709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.794918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.823385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.852528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.883573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.913684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.963923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:30.002986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:30.075131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707780534047588:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.075213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.075315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707780534047593:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.075384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707780534047595:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.075428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.079346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:30.090427Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707780534047597:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:30.160826Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707780534047650:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:31.139833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707763354175673:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:31.141631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join >> KqpSystemView::NodesOrderByDesc [GOOD] >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> KqpSystemView::Sessions-EnableRealSystemViewPaths >> KqpSysColV1::SelectRowAsterisk >> ExternalBlobsMultipleChannels::Simple [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> KqpSystemView::TopQueriesOrderByDesc [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> KqpSystemView::FailResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 4660, MsgBus: 15949 2026-01-10T12:24:23.776696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707749501817563:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.776763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.847184Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707750289069519:2257];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.847243Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.900853Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707753291790739:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.901340Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.957582Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593707751839401526:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.965147Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:24.245837Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.246042Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.248646Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.252219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.276198Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.278706Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.476223Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.490348Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.521250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.530997Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.544620Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:24.752833Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:24.799794Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.801366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.814920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.820222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:24:24.820545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.820645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.820757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.820792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.820898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.820931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.821103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.821150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.836408Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:24:24.836446Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:24:24.836463Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:24.836479Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:24.836889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:24.837295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:24.848269Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.858318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.864778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:24.865000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:24.866058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:24.905007Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.912381Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.964750Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.985992Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:25.021004Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:25.051506Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:25.060642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:25.060690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:25.060711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:25.060810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:25.078928Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:25.109209Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:25.610411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976730657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:25.684795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.989420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.190595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:26.301130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.779349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707749501817563:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.802952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:28.825952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770976656147:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.826084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.826437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707770976656157:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.826499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:28.842120Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707750289069519:2257];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.842197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:28.897249Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707753291790739:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.897321Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:28.935265Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593707751839401526:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:28.935363Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:29.167933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.225285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.274929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.327694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.380823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.439395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.496772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.620099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:29.708421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775271624406:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.708491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.708570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775271624411:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.708673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707775271624413:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.708712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:29.711751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:29.736474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707775271624415:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730670 completed, doublechecking } 2026-01-10T12:24:29.804140Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707775271624493:4529] txid# 281474976730671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:24.159984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:24.310833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:24.333052Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:24.333443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.333543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:24.824338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.824483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.832025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047861550238 != 1768047861550241 2026-01-10T12:24:24.849474Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:24.906418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:25.025058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:25.330519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:25.344616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:25.466568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.821823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.821959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.822028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.822585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.823500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.829653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:25.964916Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:26.063824Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:24.184113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:24.308893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:24.326341Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:24.326889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:24.327026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:24.829013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:24.829166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:24.836797Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047861550232 != 1768047861550235 2026-01-10T12:24:24.850621Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:24.899988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:25.018492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:25.330325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:25.343683Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:25.463914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.821519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.821633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.821698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.822227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.823497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:25.829678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:25.958961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:26.040308Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:34.896033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2026-01-10T12:23:46.109819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707591317898346:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.109906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.383207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.434352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.434458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.499482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.513977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.586389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.752007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.752116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.752133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.752227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.109764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.140754Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:48.852728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707599907834308:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.852889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.853273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707599907834317:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.853337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.277838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.449501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801803:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.449756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.449777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801824:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.449813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801825:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.449846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801826:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.449959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801827:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.450233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801828:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.450569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801836:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.450649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.451321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801851:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.451405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.451435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801853:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.452302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801864:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.452352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801880:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.452369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.452980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801887:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.454427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801904:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.454456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801908:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.454520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.455064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801920:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.455131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.455935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801936:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.456017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.456034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604202801939:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.456281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1 ... xecute at 72075186224037891 TxInFly 0 2026-01-10T12:24:29.782471Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-01-10T12:24:29.782570Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:29.782611Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-10T12:24:29.782639Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2026-01-10T12:24:29.782894Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-10T12:24:29.784698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037889 2026-01-10T12:24:29.784991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 131 -> 132 2026-01-10T12:24:29.786502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:24:29.786720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:24:29.786775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:29.787570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710694 2026-01-10T12:24:29.787600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2026-01-10T12:24:29.787617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 7 2026-01-10T12:24:29.794097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-10T12:24:29.794190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-10T12:24:29.794211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-10T12:24:29.794266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2026-01-10T12:24:29.796221Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2026-01-10T12:24:29.846081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.846251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, next wakeup# 14.999812s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.846326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.999724s, rate# 0, in queue# 1 shards, running# 2 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.846390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, next wakeup# 14.999664s, rate# 0, in queue# 1 shards, running# 3 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.846431Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2026-01-10T12:24:29.847200Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2026-01-10T12:24:29.847269Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037894 2026-01-10T12:24:29.847603Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037892 2026-01-10T12:24:29.856286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 9, next wakeup# 14.989780s, rate# 0, in queue# 0 shards, running# 3 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.858189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 11, next wakeup# 14.987873s, rate# 0, in queue# 0 shards, running# 2 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.858301Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037891 CompletedLoansChanged 2026-01-10T12:24:29.859126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, shardIdx# 72057594046644480:7 in# 13, next wakeup# 14.986940s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.860692Z node 1 
:TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037893 CompletedLoansChanged 2026-01-10T12:24:29.861522Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037894 CompletedLoansChanged 2026-01-10T12:24:29.864387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, shardIdx# 72057594046644480:5 in# 17, next wakeup# 14.981683s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.869139Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037892 CompletedLoansChanged 2026-01-10T12:24:29.869552Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:29.872969Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:29.873019Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:29.874729Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:29.875088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:29.875277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:29.878343Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:29.880131Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:24:29.880212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:29.880462Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:24:29.881291Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:24:29.881350Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:24:29.885252Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:24:29.885616Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:24:29.886782Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:24:29.886858Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:24:29.959406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.886663s, rate# 0, in queue# 1 shards, running# 
0 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.959516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, next wakeup# 14.886540s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.959667Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2026-01-10T12:24:29.959756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 0, next wakeup# 14.886305s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:29.959981Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591317898707:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2026-01-10T12:24:29.960132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 0, next wakeup# 14.885927s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 Table has 4 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 17369, MsgBus: 20284 2026-01-10T12:24:26.679728Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707766438228131:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:26.680465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:26.713665Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707763624709075:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:26.713720Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:26.718493Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707765699620048:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:26.719082Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:26.952639Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:26.955700Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:26.994627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:27.062570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:27.062690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:27.068110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:27.068234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:27.068400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:27.068430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:27.106260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:27.200283Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:27.200332Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:27.200773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:27.201197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:27.229876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:27.232985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:27.260590Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:27.264845Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:27.346647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:27.346680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:27.346692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:27.346803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:27.687063Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:27.723768Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:27.732296Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:27.845378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:27.919396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.165724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.315361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.394569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:30.635408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707783618099510:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.635536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.635855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707783618099520:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:30.635921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.024186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.067574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.120418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.171342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.213705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.264696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.320527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.382016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.466717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707787913067912:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.466767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.466840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707787913067917:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.466961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707787913067919:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.467054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:31.470611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:31.490262Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707787913067921:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:31.567848Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707787913067997:4747] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:31.678550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707766438228131:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:31.678641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:31.713955Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707763624709075:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:31.714056Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:31.718514Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593707765699620048:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:31.718595Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:21:58.561298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:21:58.561362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:21:58.561408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:21:58.561439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:21:58.561456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:21:58.561494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:21:58.561538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:21:58.562105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:21:58.562894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:21:58.624338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:21:58.624394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:58.635505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:21:58.635576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:21:58.636035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:21:58.645165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:21:58.647005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:21:58.650326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't 
been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:21:58.654291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:21:58.661574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.662689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:21:58.671440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:21:58.671552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:21:58.671591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:21:58.671696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:21:58.671902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:21:58.794725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796634Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.796989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:21:58.797135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 0 TxId: 281474976725763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2026-01-10T12:24:36.686317Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-10T12:24:36.686484Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1770:3618], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-10T12:24:36.687052Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976725763:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725763 msg type: 269090816 2026-01-10T12:24:36.687203Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976725763, partId: 4294967295, tablet: 72075186233409550 2026-01-10T12:24:36.687411Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-10T12:24:36.687455Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 0/1, is published: true 2026-01-10T12:24:36.687498Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-10T12:24:36.702741Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 20750, transactions count in step: 1, at schemeshard: 72075186233409549 2026-01-10T12:24:36.702920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725763 AckTo { RawX1: 0 RawX2: 0 } } Step: 20750 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2026-01-10T12:24:36.703027Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409549] TDropLock TPropose opId# 281474976725763:0 HandleReply TEvOperationPlan: step# 20750 2026-01-10T12:24:36.703104Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976725763:0 128 -> 240 2026-01-10T12:24:36.707160Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976725763:0, at schemeshard: 72075186233409549 2026-01-10T12:24:36.707248Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725763:0 ProgressState 2026-01-10T12:24:36.707364Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-10T12:24:36.707410Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-10T12:24:36.707459Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-10T12:24:36.707490Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-10T12:24:36.707537Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 1/1, is published: true 2026-01-10T12:24:36.707627Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1011:2950] message: TxId: 281474976725763 2026-01-10T12:24:36.707684Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-10T12:24:36.707729Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725763:0 2026-01-10T12:24:36.707786Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725763:0 2026-01-10T12:24:36.707880Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 6 2026-01-10T12:24:36.714302Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725763 2026-01-10T12:24:36.714378Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725763 2026-01-10T12:24:36.714430Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 109, txId# 281474976725763 2026-01-10T12:24:36.714522Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1770:3618], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000}, txId# 281474976725763 2026-01-10T12:24:36.716624Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-10T12:24:36.716822Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1770:3618], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-10T12:24:36.716895Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 
2026-01-10T12:24:36.718902Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done 2026-01-10T12:24:36.719031Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1770:3618], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-10T12:24:36.719084Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 109, subscribers count# 1 2026-01-10T12:24:36.719235Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-10T12:24:36.719306Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [5:3238:4979] TestWaitNotification: OK eventTxId 109 2026-01-10T12:24:36.721131Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-10T12:24:36.721229Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo quantity: 311, 311 unit: "request_unit", "request_unit" type: "delta", "delta" ... 
unblocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering 2026-01-10T12:24:36.723143Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:338: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson 2026-01-10T12:24:36.723674Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-10T12:24:36.723762Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpSystemView::FailNavigate >> KqpSystemView::NodesSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2026-01-10T12:22:28.448364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:28.449193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:28.449230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:28.458349Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:28.458607Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:22:28.458786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:28.496287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:28.503049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:28.503676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:28.505344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:28.505397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:28.505440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:28.505775Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:28.505843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:28.505909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:28.577698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:28.604503Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:28.604638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:28.604708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:28.604765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to 
activate change sender: at tablet: 9437184 2026-01-10T12:22:28.604791Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:28.604815Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.605009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:28.605058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:28.605266Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:28.605341Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:28.605381Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:28.605419Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:28.605442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:28.605463Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:28.605496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:22:28.605520Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:28.605562Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:28.605620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:28.605649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:28.605700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:28.611603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:28.611663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:28.611737Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:28.611887Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:28.611931Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 
2026-01-10T12:22:28.611984Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:28.612025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:28.612051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:28.612096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:28.612127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:28.612348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:28.612381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:28.612412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:28.612441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:28.612514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:28.612541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:22:28.612565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:28.612585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:28.612603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:28.624680Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:28.624737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:28.624774Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:28.624826Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:28.624884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:28.625433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:28.625487Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:28.625531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:28.625586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: 
{TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:28.625616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:28.625723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:28.625759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:28.625792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:28.625821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:28.629142Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:28.629213Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:28.629432Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:28.629475Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:28.629535Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:28.629577Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:28.629607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:28.629640Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:28.629675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
7185 dest 9437184 consumer 9437184 txId 521 2026-01-10T12:24:36.484601Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-10T12:24:36.484638Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.484671Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2026-01-10T12:24:36.484781Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-10T12:24:36.484805Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.484828Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2026-01-10T12:24:36.484961Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-10T12:24:36.484986Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485007Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2026-01-10T12:24:36.485107Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-10T12:24:36.485142Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485165Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2026-01-10T12:24:36.485272Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-10T12:24:36.485294Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485316Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2026-01-10T12:24:36.485417Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-10T12:24:36.485448Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485470Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack 
at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2026-01-10T12:24:36.485590Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-10T12:24:36.485627Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485650Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2026-01-10T12:24:36.485728Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-10T12:24:36.485759Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.485789Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2026-01-10T12:24:36.485986Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-10T12:24:36.486022Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.486053Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2026-01-10T12:24:36.486227Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-10T12:24:36.486265Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.486295Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2026-01-10T12:24:36.486362Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-10T12:24:36.486389Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.486416Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2026-01-10T12:24:36.486612Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-10T12:24:36.486653Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.486683Z node 10 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2026-01-10T12:24:36.486871Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-10T12:24:36.486905Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.486948Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2026-01-10T12:24:36.487089Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-10T12:24:36.487130Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.487158Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2026-01-10T12:24:36.487320Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-10T12:24:36.487353Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.487384Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2026-01-10T12:24:36.487476Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-10T12:24:36.487509Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.487538Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2026-01-10T12:24:36.509895Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:24:36.509973Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2026-01-10T12:24:36.510044Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-10T12:24:36.510121Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:24:36.510166Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:24:36.510392Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:24:36.510427Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2026-01-10T12:24:36.510603Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:24:36.510650Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:24:36.510887Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-10T12:24:36.510943Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:24:36.510985Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 29 22 18 29 28 31 30 24 24 26 22 19 23 29 28 24 26 24 18 9 28 28 13 6 1 29 26 9 24 5 29 - actual 29 22 18 29 28 31 30 24 24 26 22 19 23 29 28 24 26 24 18 9 28 28 13 6 1 29 26 9 24 5 29 - interm 29 22 18 29 28 22 30 24 24 26 22 19 23 29 28 24 26 24 18 9 28 28 13 6 1 29 26 9 24 5 29 - |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpSysColV1::UpdateAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 13211, MsgBus: 23841 2026-01-10T12:24:31.797424Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707787859630684:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:31.797587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:32.019189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:32.063510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:32.063671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:32.088597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:32.137536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:32.142025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707787859630655:2081] 1768047871795691 != 1768047871795694 2026-01-10T12:24:32.188651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:32.188680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:32.188686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-10T12:24:32.188768Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:32.280659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:32.607868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:32.621447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:32.685213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:32.791078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:32.802063Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:32.941367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:33.004051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.018873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707805039501724:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.018985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.019435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707805039501734:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.019510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.328831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.366667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.405670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.440009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.471628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.507490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.575453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.620103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.693568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707805039502608:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.693655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.693947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707805039502613:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.693956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707805039502614:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.694027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.698310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:35.709407Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707805039502617:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:35.799625Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707805039502668:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:36.798634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707787859630684:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:36.798719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:37.210910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:24:37.367647Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047877362, txId: 281474976715674] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSysColV1::SelectRange [GOOD] >> KqpSysColV1::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 3927, MsgBus: 8877 2026-01-10T12:24:30.368379Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707780440852784:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:30.368788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:30.407906Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707779839984429:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:30.410319Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:30.423892Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707782581504430:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:30.424025Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:30.437512Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:30.602904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:30.656923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:30.658535Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:30.707098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:30.707279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:30.710639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:30.710733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:30.710921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:30.710984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:30.727443Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:30.727498Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:30.727733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:30.728829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:30.744654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:30.826359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:30.853144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:30.896905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:30.920398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:30.920423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:30.920429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-10T12:24:30.920506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:30.954366Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:31.381970Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:31.410972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:31.423901Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:31.431620Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:31.488764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.719284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:31.909151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:32.012402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.241012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707797620724226:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:34.241114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:34.241410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707797620724236:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:34.241474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:34.667440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.725291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.788899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.864274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.935902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.012439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.134742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.225157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.342664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707801915692627:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.342751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.342829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707801915692632:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.345124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707801915692634:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.345217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:35.347128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:35.368873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707780440852784:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:35.368959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:35.383353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707801915692635:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:35.407756Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707779839984429:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:35.407820Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:35.424180Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593707782581504430:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:35.424264Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:35.444461Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707801915692721:4739] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:37.346082Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047877333, txId: 281474976710673] shutting down |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSystemView::CompileCacheQueriesOrderByDesc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 23082, MsgBus: 20723 2026-01-10T12:24:33.994672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707792389982615:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.994941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.209267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.212578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.212714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.274774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.358915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.408029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.408054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-10T12:24:34.408062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.408141Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.414742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.828056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.890197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.004257Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.027361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.196338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.279907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.178710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707809569853620:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.178838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.179204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707809569853629:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.179249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.511328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.546696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.574882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.608167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.641599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.680092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.741453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.780297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.863312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707809569854502:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.863396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.863603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707809569854507:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.863640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707809569854508:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.863709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.867318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:37.878358Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707809569854511:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:37.963945Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707809569854562:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:38.995151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707792389982615:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.995225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 12993, MsgBus: 13566 2026-01-10T12:24:33.743800Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707793394837967:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.744253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.973327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:33.980064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:33.980225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.028837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.068041Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.069278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707793394837937:2081] 1768047873742174 != 1768047873742177 2026-01-10T12:24:34.158099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.158120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.158127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.158234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.199168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.561482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.571249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:34.620093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.750732Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.751992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.905435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.976556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.878137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707806279741701:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.878238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.878620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707806279741711:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.878663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.265920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.296122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.324301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.356487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.405973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.465321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.501557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.547241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.628473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810574709882:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.628591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.628708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810574709887:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.628883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810574709889:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.628918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.632481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:37.648015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707810574709890:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:37.718295Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707810574709942:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:38.743806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707793394837967:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.743883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.403519Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047879437, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 3631, MsgBus: 17791 2026-01-10T12:24:34.008943Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707798336143212:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.011850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.251575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.251712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.282348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.282771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.286299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707794041175869:2081] 1768047874006728 != 1768047874006731 2026-01-10T12:24:34.301493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.480803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.480824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.480830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.480900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.553355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.918721Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.930500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:35.011380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.021449Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.175569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.356465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.423041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.170524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811221046928:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.170678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.173517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811221046938:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.173609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.532569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.566968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.599743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.643465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.683631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.725453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.764344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.839821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.927764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811221047807:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.927839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.927943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811221047812:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.928052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811221047814:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.928097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.931690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:37.943347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707811221047816:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:38.020954Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707815516015163:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:39.009155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707798336143212:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.009230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.503272Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047879542, txId: 281474976710673] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 22425, MsgBus: 15100 2026-01-10T12:24:33.933519Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707794732122994:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.934344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.187822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.198674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.198804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.255002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.331970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.442923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.442946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.442965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.443030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.454883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:24:34.836744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.855384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:24:34.905595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:34.941482Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.042434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.243991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.325378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.331046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811911994026:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.331162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.331487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811911994036:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.331542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.646963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.683892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.722706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.753219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.792034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.826376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.861827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.918782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.994347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811911994906:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.994429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.994665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811911994911:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.994675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707811911994912:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.994718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.998182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.009091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707811911994915:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:38.083464Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707816206962262:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:38.933901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707794732122994:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.933969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] >> KqpSystemView::Sessions+EnableRealSystemViewPaths >> KqpSysColV1::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 28665, MsgBus: 14696 2026-01-10T12:24:34.204813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707799413179731:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.204895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.235896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:34.504496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.504650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.509805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.594670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.596479Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.602438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707799413179703:2081] 1768047874203123 != 1768047874203126 2026-01-10T12:24:34.688763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.688784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.688790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.688880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.889583Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.148961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:35.219832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.221387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.414565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.582927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.672800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.470008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707812298083469:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.470119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.470544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707812298083479:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.470580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.820808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.880862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.913146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.940352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.972763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.007970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.041292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.114382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.180713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707816593051643:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.180799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.180880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707816593051648:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.181011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707816593051650:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.181054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.184676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.195998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707816593051652:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:38.262115Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707816593051705:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:39.205179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707799413179731:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.205260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:40.087993Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047880080, txId: 281474976710673] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 26832, MsgBus: 12125 2026-01-10T12:24:34.171773Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707797279702006:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.171913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.422522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.422643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.460156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.474535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.479063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707797279701969:2081] 1768047874169568 != 1768047874169571 2026-01-10T12:24:34.494165Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.601058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.601086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.601092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.601171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.732894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-10T12:24:35.140501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:35.158533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:35.181806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.211454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.342085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.518371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.587151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.451063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810164605729:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.451192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.451598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810164605739:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.451644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.774863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.812000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.846809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.875557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.905446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.945287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.980448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.026933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.103112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814459573911:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.103211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.103954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814459573917:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.104025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814459573916:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.104061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.108425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.120140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707814459573920:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:38.183607Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707814459573971:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:39.171920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707797279702006:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.171988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:40.038001Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047880074, txId: 281474976715673] shutting down |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables >> KqpSysColV0::InnerJoinSelect >> KqpSystemView::FailResolve [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> BasicStatistics::Serverless [GOOD] |94.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 2707, MsgBus: 30112 2026-01-10T12:24:35.682015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707801865575604:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:35.682559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:35.940365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.940477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.998114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:36.006456Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707801865575501:2081] 1768047875676904 != 1768047875676907 2026-01-10T12:24:36.014180Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:36.017136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:36.109807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:36.109823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:36.109827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:36.109893Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:36.282535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:36.531239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:36.537615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:36.603729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.701973Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:36.772555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.940729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.013202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.034176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819045446562:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.034305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.034606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819045446572:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.034664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.313422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.346217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.384498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.416999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.446815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.477734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.510544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.551364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.616904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819045447441:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.616993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.617114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819045447446:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.617285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819045447448:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.617338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.620840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:39.631402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707819045447450:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:39.703605Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707819045447503:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:40.681287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707801865575604:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:40.681369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::QueryStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 5293, MsgBus: 6458 2026-01-10T12:24:33.957596Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707793416071154:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.957649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.231585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.262194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.262349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.316018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.318510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.456417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.456438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.456447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.456542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.467454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.939939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.964345Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.018826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.169850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.341944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.408766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.263319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810595942172:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.263465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.266016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707810595942182:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.266122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.640522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.674721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.710852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.741900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.779676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.819931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.853454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.925804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.011781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814890910350:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.011880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.012161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814890910356:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.012210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.016537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814890910355:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.028934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.043535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707814890910359:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:38.120929Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707814890910410:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:38.958162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707793416071154:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.958228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] >> KqpSystemView::NodesRange2 [GOOD] >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] >> KqpSystemView::NodesRange1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 29756, MsgBus: 28735 2026-01-10T12:24:36.654461Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707808188910570:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:36.656325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:36.684293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:36.934708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:36.934790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:36.956537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:36.969995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:37.010413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:37.011389Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707808188910543:2081] 1768047876653075 != 1768047876653078 2026-01-10T12:24:37.088150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:37.088189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:37.088201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:37.088317Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:37.244544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:37.474542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:37.547805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.661031Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:37.696575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.878382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.947734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.847167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821073814309:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.847282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.847718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821073814319:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.847773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.210535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.237984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.267624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.298861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.325380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.384329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.414338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.459449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.525906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825368782483:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.525964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.525983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825368782488:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.526160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825368782490:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.526267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.529548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:40.539745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707825368782491:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:40.616648Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707825368782543:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:41.654543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707808188910570:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:41.654629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:42.049370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:24:42.193917Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7593707833958717524:4044], for# user0@builtin, access# SelectRow 2026-01-10T12:24:42.194035Z node 1 :KQP_EXECUTER ERROR: kqp_table_resolver.cpp:274: TxId: 281474976710675. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 30] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-10T12:24:42.218673Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2RjODU0MjgtYWEyMDNhZGQtZDMxYmY2MGQtMTRkZWM4MjY=, ActorId: [1:7593707833958717497:2535], ActorState: ExecuteState, LegacyTraceId: 01kekxvbtq4m9f4xwz81qyhrbv, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Failed to resolve table `/Root/.sys/partition_stats` status: AccessDenied." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-10T12:24:42.218985Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047882193, txId: 281474976710674] shutting down >> KqpSysColV0::InnerJoinSelectAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 28131, MsgBus: 2732 2026-01-10T12:24:36.565228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707808438799701:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:36.565577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:36.816501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:36.816609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:36.843495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:36.872483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:36.897704Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707808438799578:2081] 1768047876555590 != 1768047876555593 2026-01-10T12:24:36.898615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:37.002474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:37.002499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:37.002518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:37.002594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:37.166671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:37.440300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:37.501511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.569554Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:37.619737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.772668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.846775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.827080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821323703342:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.827192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.827470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821323703352:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.827551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.143212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.168201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.194455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.219441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.247596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.277241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.325655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.363630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.426593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825618671522:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.426665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.426776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825618671527:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.426818Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825618671528:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.426906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.430299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:40.438815Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707825618671531:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:40.518754Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707825618671582:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:41.564195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707808438799701:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:41.564253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:42.433417Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047882468, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:23:58.734128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:23:58.873680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:23:58.895142Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:23:58.895718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:23:58.895799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:23:59.499081Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:59.629589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:59.629696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:59.665340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:59.773576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:00.894544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:00.895060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:00.895107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:00.895149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:00.895636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:00.974069Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:01.609351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:04.800961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:04.801055Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:04.815067Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:04.895949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:04.896091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.940174Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:04.941430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.150773Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:05.150932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.157844Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.159653Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.160292Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.161350Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.161478Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.161685Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.161766Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.161997Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.162120Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.182580Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.435493Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:05.483087Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:05.483164Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:05.520375Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:05.521970Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:05.522207Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:05.522272Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:05.522323Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:05.522390Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:05.522449Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:05.522498Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:05.523717Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:05.558339Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.558499Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.582753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2160:2619] 2026-01-10T12:24:05.583332Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2160:2619], schemeshard id = 72075186224037897 2026-01-10T12:24:05.682277Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2251:2667] 2026-01-10T12:24:05.685169Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:24:05.708561Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Describe result: PathErrorUnknown 2026-01-10T12:24:05.708634Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Creating table 2026-01-10T12:24:05.708729Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:24:05.717755Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2307:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:05.724050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:05.741934Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:05.742109Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:05.756018Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:05.956665Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:24:06.028222Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:06.438043Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:06.568684Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:06.568794Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2256:2671] Owner: [2:2255:2670]. Column diff is empty, finishing 2026-01-10T12:24:07.201086Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... atType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:26.532496Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-10T12:24:26.532543Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [2:4359:3718], StatRequests.size() = 1 2026-01-10T12:24:27.646889Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:4391:3729]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:27.647365Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:24:27.647424Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4391:3729], StatRequests.size() = 1 2026-01-10T12:24:28.608798Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:28.652143Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4423:3744]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:28.652517Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:24:28.652581Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4423:3744], StatRequests.size() = 1 2026-01-10T12:24:29.663802Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4455:3759]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:29.664231Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:24:29.664285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4455:3759], StatRequests.size() = 1 2026-01-10T12:24:30.532895Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:30.533415Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-10T12:24:30.533825Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 
cookie: 0 2026-01-10T12:24:30.533965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:24:30.579042Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4483:3770]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:30.579485Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:24:30.579540Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4483:3770], StatRequests.size() = 1 2026-01-10T12:24:31.590673Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4517:3786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:31.591095Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:24:31.591153Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4517:3786], StatRequests.size() = 1 2026-01-10T12:24:32.570593Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4549:3798]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:32.570991Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:24:32.571043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4549:3798], StatRequests.size() = 1 2026-01-10T12:24:33.659122Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:33.802196Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4590:3813]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:33.802586Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:24:33.802641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4590:3813], StatRequests.size() = 1 2026-01-10T12:24:33.814876Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:33.814948Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:33.814996Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:33.815031Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:35.054303Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4625:3829]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:35.054685Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:24:35.054732Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4625:3829], StatRequests.size() = 1 2026-01-10T12:24:36.300986Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-10T12:24:36.301353Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-10T12:24:36.301869Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:36.301971Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:24:36.328925Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:36.329020Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:36.329348Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:36.349631Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:36.420607Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4656:3842]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:36.421079Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:24:36.421136Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4656:3842], StatRequests.size() = 1 2026-01-10T12:24:37.628762Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4690:3858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:37.629219Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:24:37.629275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4690:3858], StatRequests.size() = 1 2026-01-10T12:24:38.188202Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:24:38.188279Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.525000s, at schemeshard: 72075186224037899 2026-01-10T12:24:38.188446Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:24:38.202385Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:38.857171Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4722:3872]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:38.857555Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:24:38.857610Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4722:3872], StatRequests.size() = 1 2026-01-10T12:24:39.998831Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:40.054171Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4756:3886]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:40.054533Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:24:40.054592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4756:3886], StatRequests.size() = 1 2026-01-10T12:24:41.246217Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4794:3902]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:41.246597Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:24:41.246652Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4794:3902], StatRequests.size() = 1 2026-01-10T12:24:42.251856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:42.252477Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:24:42.252841Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:42.252974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:42.275568Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:42.275650Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:42.275983Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:42.290380Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:42.323413Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4826:3916]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:42.323729Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:24:42.323767Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4826:3916], StatRequests.size() = 1 >> KqpSystemView::FailNavigate [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 20155, MsgBus: 30854 2026-01-10T12:24:34.938276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707797044395516:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.938373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:35.210609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:35.226027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.226169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.228117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.341938Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:35.346788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707797044395472:2081] 1768047874936028 != 1768047874936031 2026-01-10T12:24:35.366395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:35.366412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:35.366416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:35.366497Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:35.503946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.833206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:35.857492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:24:35.947375Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 1 2026-01-10T12:24:38.862374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814224265616:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.862502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814224265608:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.862891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.863195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707814224265624:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.863267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.867622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.880803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707814224265622:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:24:39.005043Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707814224265677:2350] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-01-10T12:24:39.938125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707797044395516:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.938218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:43.351577Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047883346, txId: 281474976715673] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 29562, MsgBus: 7596 2026-01-10T12:24:40.286878Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707822620825744:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:40.287013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:40.498203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:40.530622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:40.530778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:40.578533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:40.605586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707822620825715:2081] 1768047880285138 != 1768047880285141 2026-01-10T12:24:40.606625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:40.669977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:40.670024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:40.670044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:40.670147Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:40.693830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:41.011712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:41.024224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:24:41.293484Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:43.368475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707835505728575:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.368483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707835505728567:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.368581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.368841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707835505728582:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.368901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.372559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:43.384820Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707835505728581:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:24:43.474480Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707835505728634:2581] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BasicStatistics::Simple [GOOD] >> KqpSysColV0::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 18456, MsgBus: 18015 2026-01-10T12:24:33.688913Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707793570797285:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.689019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.726814Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707794861852929:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.727089Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.735745Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707792802867568:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.736129Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.764618Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593707796234962330:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.764780Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.937210Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:33.936784Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:33.936951Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:33.983722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:33.996512Z node 3 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.001073Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.124373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.124488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.140579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.140667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.142882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.142943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.143091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.143137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.143281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.143343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.167700Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:24:34.167893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.180539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.181723Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:24:34.181800Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:34.181837Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:34.189001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.189218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.189880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.387133Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:24:34.389178Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.407219Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.408834Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.412678Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.432594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.699457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.720802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.720823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.720829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.721103Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.740193Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.751554Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.901364Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.912845Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.706905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:35.818074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.067591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.276109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.360500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.698831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707793570797285:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.698955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:38.700355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707815045635900:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.700484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.700869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707815045635913:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.700944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.726356Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707794861852929:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.726429Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:38.734511Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593707792802867568:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.734586Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:38.764215Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593707796234962330:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:38.764313Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:38.972198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.081254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.129663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.177751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.226978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.280565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.332294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.417793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.517893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819340604170:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.517987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.518205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819340604176:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.518242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707819340604175:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.518278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.522170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:39.542837Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707819340604179:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:39.626815Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707819340604257:4581] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:41.432627Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047881427, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 29592, MsgBus: 18371 2026-01-10T12:24:34.425754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707800016137579:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.426547Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.458160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:34.480421Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707798766562101:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.480487Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.500289Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707800626063445:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.509755Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.528498Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593707798451162876:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.539401Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.554920Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593707799522555708:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.554978Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.937801Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.941680Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.941950Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.942468Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.984108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:35.056046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.056190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.056476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.056755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.071413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.072194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.072427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.072495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.077795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:35.077900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:35.133541Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-10T12:24:35.133767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.134026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.221485Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:35.223822Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.222745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.222911Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:35.223558Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:35.224165Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.230362Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.233656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.234750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.235028Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:24:35.235707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.238231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:35.491165Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.508261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.513079Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.552839Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.625784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:35.625809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:35.625829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:35.625927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:35.641795Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:36.265554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:36.377370Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.682665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:36.925589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.038435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.373734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821490976104:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.373875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.374307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707821490976114:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.374398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:39.425580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707800016137579:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.425642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.479869Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593707798766562101:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.479963Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.500867Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593707800626063445:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.500964Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.528500Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593707798451162876:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.528593Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.555117Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593707799522555708:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.555189Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:39.702604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.750308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.791275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.891056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735665:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.937478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.996148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.048340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.111881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.195473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825785944368:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.195603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.195794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825785944374:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.195841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707825785944373:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.195875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:40.199584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976735670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:40.220685Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707825785944377:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976735670 completed, doublechecking } 2026-01-10T12:24:40.277852Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707825785944455:4557] txid# 281474976735671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:42.300242Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047882294, txId: 281474976735673] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 26438, MsgBus: 24010 2026-01-10T12:24:39.020740Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707820512723752:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.020827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:39.198391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:39.219188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:39.219339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:39.277285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:39.292515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:39.296642Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707820512723729:2081] 1768047879019978 != 1768047879019981 2026-01-10T12:24:39.379964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:39.380004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:39.380014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:39.380120Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:39.429451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:39.735021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:39.781627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:39.926956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.047085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:40.099873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.162179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.154249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707833397627500:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.154393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.154772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707833397627510:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.154884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.554792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.586553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.616719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.648918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.681082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.714020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.770364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.816193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.892055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707833397628375:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.892135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.892209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707833397628380:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.892353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707833397628382:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.892389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.896626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:42.907415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707833397628383:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:42.981162Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707833397628435:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:44.020960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707820512723752:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:44.021030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:44.616712Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7593707841987563347:3985], for# user0@builtin, access# DescribeSchema 2026-01-10T12:24:44.616887Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7593707841987563347:3985], for# user0@builtin, access# DescribeSchema 2026-01-10T12:24:44.628717Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707841987563337:2538], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:24:44.629182Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDUxNWZhZi1iZWM0NTlkNi1kYzJiYWQ3MC1jNjBlNjg2Mw==, ActorId: [1:7593707841987563330:2534], ActorState: ExecuteState, LegacyTraceId: 01kekxve8pd6e5348x9075g503, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/.sys/partition_stats]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> UpsertLoad::ShouldWriteKqpUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsert >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> ReadLoad::ShouldReadIterate >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteKqpUpsert >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 65389, MsgBus: 4373 2026-01-10T12:24:39.707141Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707819936300989:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.707975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:39.896991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:39.911292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:39.911385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:39.980587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:39.983019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:39.984508Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707819936300886:2081] 1768047879703320 != 1768047879703323 2026-01-10T12:24:40.062403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:24:40.067182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:40.067209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:40.067223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:40.067451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:40.450814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:40.498034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.655611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.726284Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:40.797715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.856881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.742796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707832821204656:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.742962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.743318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707832821204666:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.743376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.108866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.133236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.158530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.190448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.221881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.278986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.310118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.351373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.423446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707837116172834:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.423515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.423520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707837116172839:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.423685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707837116172841:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.423724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.426628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:43.436731Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707837116172842:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:43.526600Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707837116172894:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:44.706340Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707819936300989:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:44.706416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:03.458910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:03.571905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:03.580795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:03.581025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:03.581235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:04.027618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:04.130733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:04.130831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.191799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:04.293761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:05.075812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:05.076341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:05.076388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:05.076468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:05.077027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:05.134554Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:05.741967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.163251Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.172009Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:09.175671Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:09.210900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:09.211020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.253996Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:09.255940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.446063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:09.446168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.448010Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.448918Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.449932Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.450872Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.451115Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.451281Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.451434Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.451706Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.451908Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.467851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.733500Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:09.776514Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:09.776629Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:09.828071Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:09.829048Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:09.829285Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:09.829336Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:09.829425Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:09.829478Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:09.829521Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:09.829578Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:09.830108Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:09.847955Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:09.848071Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:09.878854Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:24:09.879164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:24:09.962015Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:24:09.968964Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:09.992171Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:24:09.992252Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:24:09.992349Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:09.998736Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.004487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.014721Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.014901Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.037733Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:10.198422Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:10.289532Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:10.681351Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:10.782029Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:10.782122Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:24:11.390254Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:24:29.064876Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3650:3314]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:29.065211Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:24:29.065259Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3650:3314], StatRequests.size() = 1 2026-01-10T12:24:30.127423Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3685:3328]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:30.127677Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:24:30.127719Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3685:3328], StatRequests.size() = 1 2026-01-10T12:24:31.156975Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3714:3340]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:31.157263Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:24:31.157297Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3714:3340], StatRequests.size() = 1 2026-01-10T12:24:31.654744Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:32.206795Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3745:3354]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:32.207194Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:24:32.207248Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3745:3354], StatRequests.size() = 1 2026-01-10T12:24:33.181974Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3772:3364]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-10T12:24:33.182296Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:24:33.182346Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3772:3364], StatRequests.size() = 1 2026-01-10T12:24:33.629961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:33.630104Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:24:33.630631Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:33.630769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:24:34.180791Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3799:3374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:34.181077Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:24:34.181123Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3799:3374], StatRequests.size() = 1 2026-01-10T12:24:35.291833Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3831:3386]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:35.292163Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:24:35.292215Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3831:3386], StatRequests.size() = 1 2026-01-10T12:24:36.363340Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:3862:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:36.363667Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:24:36.363720Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:3862:3396], StatRequests.size() = 1 2026-01-10T12:24:37.029714Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:37.125869Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:37.125953Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:37.126002Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:37.126049Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:37.740399Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:3898:3411]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:37.740739Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:24:37.740790Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, 
ReplyToActorId = [2:3898:3411], StatRequests.size() = 1 2026-01-10T12:24:38.853330Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:3929:3423]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:38.853685Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:24:38.853738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:3929:3423], StatRequests.size() = 1 2026-01-10T12:24:39.451443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:39.451599Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-10T12:24:39.452142Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:39.452257Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:24:39.463281Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:39.463362Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:39.463623Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:39.477440Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:40.103186Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:3958:3435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:40.103452Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:24:40.103483Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:3958:3435], StatRequests.size() = 1 2026-01-10T12:24:41.165760Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:3987:3447]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:41.166064Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:24:41.166115Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:3987:3447], StatRequests.size() = 1 2026-01-10T12:24:42.151009Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4016:3457]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:42.151303Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:24:42.151340Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4016:3457], StatRequests.size() = 1 2026-01-10T12:24:42.671392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:43.224839Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4049:3469]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:43.225109Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:24:43.225150Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4049:3469], StatRequests.size() = 1 2026-01-10T12:24:44.209087Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4078:3481]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:44.209441Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-10T12:24:44.209487Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4078:3481], StatRequests.size() = 1 2026-01-10T12:24:44.681516Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:44.681932Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:24:44.682217Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:44.682353Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:44.704259Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:44.704331Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:44.704591Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:44.718135Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:45.239134Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4109:3495]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:45.239414Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-10T12:24:45.239462Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4109:3495], StatRequests.size() = 1 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 13510, MsgBus: 22824 2026-01-10T12:24:39.531759Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707818929885687:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.531943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:39.704615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:24:39.735533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:39.735653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:39.779940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:39.784342Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:39.846150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:39.846172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:39.846178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:39.846363Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:39.892904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:40.246014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:40.302500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.422635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.553518Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:40.563750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:40.631382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.441391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707831814789337:2398], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.441545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.441918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707831814789347:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.441992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:42.752441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.786159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.814626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.845254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.871854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.899979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.930901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.998182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.075291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707836109757515:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.075361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707836109757520:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.075368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.075625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707836109757523:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.075661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:43.079497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:43.093412Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707836109757522:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:43.177892Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707836109757575:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:44.531434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707818929885687:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:44.531515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] >> BasicStatistics::TwoServerlessDbs [GOOD] >> KqpSystemView::Join [GOOD] >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 4761, MsgBus: 21734 2026-01-10T12:24:41.434552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707829182601517:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:41.435188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:41.631805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:41.640486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:41.640616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:41.691675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:41.742667Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:41.744669Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707829182601487:2081] 1768047881432985 != 1768047881432988 2026-01-10T12:24:41.796909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:41.796936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:41.796945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:41.797059Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:41.840057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:42.201056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:42.246343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.357692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.460140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:42.504661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.566546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.368023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707842067505246:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.368150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.368515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707842067505256:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.368568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.706472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.730197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.755012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.781423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.810005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.840126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.895441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.927619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.995712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707842067506124:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.995788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.995886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707842067506129:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.995970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707842067506130:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.996010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.999284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:45.009755Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707842067506133:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:45.081461Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707846362473480:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:46.434639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707829182601517:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:46.434695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 26792, MsgBus: 6559 2026-01-10T12:24:41.768533Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707827561369237:2133];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:41.768757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:41.955919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:41.982228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:41.982347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:42.059751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:42.075747Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:42.077011Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707827561369144:2081] 1768047881765783 != 1768047881765786 2026-01-10T12:24:42.157034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:42.183896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:42.183926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:42.183933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:42.184009Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:42.569025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:42.624929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.761781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.777668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:42.927049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:42.994299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.829351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707840446272905:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.829498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.829923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707840446272915:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:44.829992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.106978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.139341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.172135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.208560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.237456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.270649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.331454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.377152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.446352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844741241084:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.446404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.446579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844741241089:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.446686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844741241091:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.446758Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.450514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:45.460944Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707844741241093:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:45.517670Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707844741241144:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:46.770005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707827561369237:2133];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:46.770061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> UpsertLoad::ShouldCreateTable |94.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2026-01-10T12:23:46.171880Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707591589106900:2190];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.172045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.411788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.468562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.468657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.518988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.587022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.601956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.754504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.754545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.754555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.754673Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:23:47.105520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.173743Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:48.968565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707600179042744:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.968654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.968969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707600179042754:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.969028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.277806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.425156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604474010215:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604474010220:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604474010221:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.425563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.432869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:23:49.452606Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707604474010224:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:23:49.588491Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707604474010296:3122] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:23:51.169479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707591589106900:2190];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:51.169565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:01.371420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:24:01.371452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:21.143546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976712661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-10T12:24:24.335275Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.1986 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-10T12:24:29.335936Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.1986 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-10T12:24:34.342381Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 8.6988 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-10T12:24:34.444423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 134562, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 86988, cpuUsageThreshold: 50000) 2026-01-10T12:24:34.444454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-10T12:24:34.448072Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2279: Started key access sampling at datashard: 72075186224037888 2026-01-10T12:24:34.448308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 134562 row count 2000 buckets 9 ready 1 2026-01-10T12:24:34.448378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-01-10T12:24:39.342116Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 8.6988 is higher than threshold of 1 in-flight Tx: 
0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-10T12:24:39.442513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 134562, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 86988, cpuUsageThreshold: 50000) 2026-01-10T12:24:39.442542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-10T12:24:39.443047Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2281: Extended key access sampling at datashard: 72075186224037888 2026-01-10T12:24:39.443193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 134562 row count 2000 buckets 9 ready 1 2026-01-10T12:24:39.443250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-01-10T12:24:44.342527Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 8.6988 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-10T12:24:44.444192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 134562, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 86988, cpuUsageThreshold: 50000) 2026-01-10T12:24:44.444215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-10T ... 
RD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\361\327\003m\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000\361\327\003m\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000\361\327\003m\000\000\000\200" } SchemeshardId: 72057594046644480 2026-01-10T12:24:44.446896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:24:44.449655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-10T12:24:44.454578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:24:44.454880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:24:44.454952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 2 -> 3 2026-01-10T12:24:44.456900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-10T12:24:44.462634Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7593707840697352746:3916] 2026-01-10T12:24:44.462749Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7593707840697352753:3861] 2026-01-10T12:24:44.480207Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:24:44.480317Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-10T12:24:44.480518Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-10T12:24:44.484460Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:24:44.484534Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:24:44.484662Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:24:44.484907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2026-01-10T12:24:44.488836Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2026-01-10T12:24:44.488869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 3 -> 131 2026-01-10T12:24:44.490232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:44.508642Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-10T12:24:44.508646Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-10T12:24:44.508749Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:44.508762Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:44.508810Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:24:44.508810Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:24:44.508844Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-10T12:24:44.508845Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-10T12:24:44.509207Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-10T12:24:44.509230Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-10T12:24:44.511681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2026-01-10T12:24:44.512025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 131 -> 132 2026-01-10T12:24:44.513570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:24:44.513841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:24:44.513908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:44.516527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-01-10T12:24:44.516571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 
72057594046644480, txId: 281474976715693 2026-01-10T12:24:44.516591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-10T12:24:44.521666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-10T12:24:44.521740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:44.521772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-10T12:24:44.521814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-10T12:24:44.523089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-10T12:24:44.610639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:44.610822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.999769s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:44.610987Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591589107124:2196] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-10T12:24:44.612665Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7593707591589107124:2196] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-10T12:24:44.621861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 11, next wakeup# 14.988742s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-10T12:24:44.625043Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-10T12:24:44.627314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 16, next wakeup# 14.983291s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-10T12:24:44.633904Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-10T12:24:44.645054Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:44.648717Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state 
Offline to schemeshard 72057594046644480 2026-01-10T12:24:44.659499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:44.666089Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-10T12:24:44.666478Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:24:44.667052Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-10T12:24:44.667171Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-10T12:24:46.470057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2026-01-10T12:24:46.470155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2026-01-10T12:24:46.470208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 Table has 2 shards |94.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpSystemView::QueryStatsScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 7822, MsgBus: 13154 2026-01-10T12:24:43.046910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707836698103097:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:43.046978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:43.248925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:43.249031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:43.316455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:43.350271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:43.350702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:43.360981Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707836698103069:2081] 1768047883045484 != 1768047883045487 2026-01-10T12:24:43.399372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:43.399459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-10T12:24:43.399473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:43.399579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:43.533436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:43.770175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:43.842075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:43.988313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.083958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:44.124425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.184314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.757174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707845288039525:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.757370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.757769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707845288039535:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.757824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.137859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.167162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.195515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.224645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.252828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.283759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.332393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.370572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.435120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707849583007703:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.435209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.435275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707849583007708:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.435440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707849583007710:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.435504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.438540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:46.448096Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707849583007712:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:46.538634Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707849583007763:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:48.047055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707836698103097:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:48.047130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 30181, MsgBus: 17277 2026-01-10T12:24:43.933391Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707836551264225:2103];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:43.933555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:44.143781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:44.143905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:44.146986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:44.217684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:44.237269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:44.283602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:44.283652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:44.283661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:44.283774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:44.407698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:44.696666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:24:44.748743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.862693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.969895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:45.020538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.087777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.638711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707849436167918:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.638843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.639171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707849436167927:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.639219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.898271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.927521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.955809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.984147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.012149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.045209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.093039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.138462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.200830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707853731136097:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.200883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707853731136102:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.200948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.201133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707853731136105:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.201188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.204035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:47.214182Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707853731136104:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:47.289529Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707853731136157:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 11532, MsgBus: 6993 2026-01-10T12:24:43.018639Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707838184450806:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:43.019500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:43.053239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:43.253135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:43.253223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:43.253888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:43.270415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:43.347359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:43.349849Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707838184450774:2081] 1768047883016251 != 1768047883016254 2026-01-10T12:24:43.423225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:43.423251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:43.423258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:43.423353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:43.532216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:43.818485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:43.885292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.028425Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:44.033266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.172093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.228441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.171877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851069354531:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.172004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.172307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851069354541:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.172370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.423677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.454003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.479152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.504851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.530771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.561267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.615376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.651993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.714453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851069355410:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.714556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.714558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851069355415:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.714747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851069355417:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.714792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.717620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:46.728053Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707851069355418:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:46.814425Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707851069355470:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:48.018889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707838184450806:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:48.018985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:00.765548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:00.888071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:00.894832Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:00.895103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:00.895167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:01.285196Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:01.400257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:01.400386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:01.434975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:01.521839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:02.304105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:02.305628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:02.305685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:02.305729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:02.305939Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:02.366794Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:02.938120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:06.292305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:06.302736Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:06.307061Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:06.356568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:06.356717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.404803Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:06.408975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:06.614665Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:06.614812Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.616575Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.617525Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.622702Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.623712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.624178Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.624306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.624520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.624782Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.624916Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:06.641585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:06.940328Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:06.973620Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:06.973739Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:07.019032Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:07.021068Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:07.021355Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:07.021423Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:07.021486Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:07.021549Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:07.021612Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:07.021673Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:07.022558Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:07.023320Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2453] 2026-01-10T12:24:07.027426Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:24:07.036316Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Describe result: PathErrorUnknown 2026-01-10T12:24:07.036388Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Creating table 2026-01-10T12:24:07.036495Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:24:07.038866Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.038981Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2145:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.076298Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2217:2646] 2026-01-10T12:24:07.076558Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2217:2646], schemeshard id = 72075186224037897 2026-01-10T12:24:07.110422Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2248:2657], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:07.125971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:07.146470Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:07.146636Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:07.158741Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:07.427811Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:07.573377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:24:07.942782Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:08.078897Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:08.079001Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2120:2582] Owner: [2:2119:2581]. Column diff is empty, finishing 2026-01-10T12:24:08.856223Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ce_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:24:33.568606Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:5321:4242], StatRequests.size() = 1 2026-01-10T12:24:34.677719Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:5350:4254]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:34.678081Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:24:34.678134Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:5350:4254], StatRequests.size() = 1 2026-01-10T12:24:35.200010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:35.200540Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-10T12:24:35.201089Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:35.201199Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:24:35.876526Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:5381:4268]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:35.877013Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:24:35.877069Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:5381:4268], StatRequests.size() = 1 2026-01-10T12:24:37.100435Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:5418:4285]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:37.100775Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:24:37.100824Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:5418:4285], StatRequests.size() = 1 2026-01-10T12:24:38.259815Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:5453:4299]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:38.260200Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:24:38.260248Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:5453:4299], StatRequests.size() = 1 2026-01-10T12:24:38.877109Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:38.990049Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:38.990138Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:38.990189Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:38.990231Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:39.712672Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:5493:4318]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:39.712941Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:24:39.712974Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:5493:4318], StatRequests.size() = 1 2026-01-10T12:24:40.917118Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:5526:4331]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:40.917506Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:24:40.917563Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:5526:4331], StatRequests.size() = 1 2026-01-10T12:24:41.605054Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:41.605588Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-10T12:24:41.605954Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:41.606036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:24:41.617989Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:41.618091Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:41.618462Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:41.633608Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:42.418206Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: 
[TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:5559:4347]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:42.418554Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:24:42.418599Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:5559:4347], StatRequests.size() = 1 2026-01-10T12:24:43.574890Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:24:43.574958Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.931000s, at schemeshard: 72075186224037899 2026-01-10T12:24:43.575218Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:24:43.588992Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:43.720550Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:5596:4366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:43.720887Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:24:43.720932Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:5596:4366], StatRequests.size() = 1 2026-01-10T12:24:44.841225Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:5627:4378]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:44.841636Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:24:44.841686Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5627:4378], StatRequests.size() = 1 2026-01-10T12:24:45.338178Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-10T12:24:45.338269Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.650000s, at schemeshard: 72075186224037905 2026-01-10T12:24:45.338510Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:24:45.353781Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:45.459095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:46.226698Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5670:4400]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:46.227055Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:24:46.227105Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5670:4400], StatRequests.size() = 1 2026-01-10T12:24:47.471276Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 
], ReplyToActorId[ [2:5699:4412]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:47.471609Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:24:47.471643Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5699:4412], StatRequests.size() = 1 2026-01-10T12:24:47.946749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:47.947174Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:24:47.947531Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:47.947596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:47.969394Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:47.969459Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:47.969669Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:47.982860Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:48.631804Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5732:4428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:48.632214Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:24:48.632260Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5732:4428], StatRequests.size() = 1 2026-01-10T12:24:48.632748Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5734:4430]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:48.636417Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:24:48.636478Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5734:4430], StatRequests.size() = 1 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 18471, MsgBus: 2320 2026-01-10T12:24:34.411732Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707799975946089:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:34.412487Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:34.733029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:34.733157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.737581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:34.762074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.773226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.900714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.900739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.900745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.900882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.908997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:35.409092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:35.424007Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:35.424543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:24:35.472148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.605444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.754016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:35.825453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:37.813292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707812860849799:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.813435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.814214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707812860849809:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:37.814344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.150695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.181385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.210308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.244475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.271402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.305511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.339021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.385204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:38.470645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707817155817975:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.470720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.472357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707817155817980:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.472368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707817155817981:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.472424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:38.476574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:38.487810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707817155817984:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:38.544273Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707817155818035:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:39.412222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707799975946089:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:39.412301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:40.141636Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047880134, txId: 281474976715673] shutting down waiting... 2026-01-10T12:24:41.321780Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047881317, txId: 281474976715675] shutting down waiting... 2026-01-10T12:24:42.500389Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047882495, txId: 281474976715677] shutting down waiting... 2026-01-10T12:24:43.671892Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047883667, txId: 281474976715679] shutting down waiting... 2026-01-10T12:24:44.837131Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047884833, txId: 281474976715681] shutting down waiting... 2026-01-10T12:24:46.011776Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047886007, txId: 281474976715683] shutting down waiting... 2026-01-10T12:24:47.247006Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047887165, txId: 281474976715685] shutting down waiting... 
2026-01-10T12:24:48.378069Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047888373, txId: 281474976715687] shutting down 2026-01-10T12:24:48.759872Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047888746, txId: 281474976715689] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 13864, MsgBus: 31885 2026-01-10T12:24:42.238388Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707831413361827:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:42.238459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:42.446778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:42.454784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:42.455011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:42.532948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:42.545191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:42.547005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707831413361798:2081] 1768047882236189 != 1768047882236192 2026-01-10T12:24:42.608201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:42.644016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:42.644047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:42.644061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:42.644213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:43.017295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:43.029829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:24:43.246397Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 1 2026-01-10T12:24:45.511095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844298264865:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.511097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844298264873:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.511216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.511482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707844298264880:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.511541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:45.514140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:45.521995Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707844298264879:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:24:45.617826Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707844298264932:2576] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-01-10T12:24:47.238423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707831413361827:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:47.238479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:48.986154Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047888981, txId: 281474976710673] shutting down |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> TNebiusAccessServiceTest::Authorize [GOOD] |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> TNebiusAccessServiceTest::PassRequestId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 28012, MsgBus: 29051 2026-01-10T12:24:43.915969Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707838917322182:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:43.916205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:44.154590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:44.154726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:44.158533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:44.180171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:44.181083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:44.183382Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707838917322159:2081] 1768047883915111 != 1768047883915114 2026-01-10T12:24:44.275666Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:44.275686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:44.275697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:44.275777Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:44.441029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:44.690369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:44.732722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.861150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:44.968260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:45.019704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:45.086234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.690457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851802225917:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.690599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.690979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707851802225927:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.691052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.946853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.975792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.005885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.030549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.058652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.092992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.121034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.172303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.233824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707856097194097:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.233912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.233949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707856097194102:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.234086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707856097194104:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.234128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.237106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:47.245554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707856097194106:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:24:47.329497Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707856097194157:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:49.211042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707838917322182:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:49.211560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:24:49.330318Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047888957, txId: 281474976710673] shutting down 2026-01-10T12:24:49.457438Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047889453, txId: 281474976710676] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:22.217593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:22.328708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:22.337757Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:22.338005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:22.338151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:22.759101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:22.858721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:22.858866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:22.920626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:23.035106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:23.749715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:23.750329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:23.750375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:23.750406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:23.750977Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:23.820318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:24.448586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:27.850172Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:27.858293Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:27.861828Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:27.891945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:27.892058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:27.934266Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:27.936094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.127464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.127573Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.129208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.129852Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.130617Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.131436Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.131745Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.131897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.132036Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.132249Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.132421Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:28.148192Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.399952Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:28.435203Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:28.435291Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:28.477329Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:28.478323Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:28.478586Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:28.478668Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:28.478728Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:28.478793Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:28.478850Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:28.478922Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:28.479575Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:28.493035Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:28.493151Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:28.525815Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:21:28.526178Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:21:28.587445Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:21:28.589954Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:28.600616Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:21:28.600689Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:21:28.600766Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:28.605597Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:28.609759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:28.617285Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:28.617425Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:28.630211Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:28.775248Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:28.847000Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:29.170876Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:29.280344Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:29.280449Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:21:29.886461Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:22:52.509581Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-10T12:22:52.509666Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2026-01-10T12:22:52.509782Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2026-01-10T12:22:52.509848Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:22:52.511235Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:22:52.511609Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:4363:3887] Owner: [2:4362:3886]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:22:52.511677Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:4363:3887] Owner: [2:4362:3886]. Column diff is empty, finishing 2026-01-10T12:22:52.532601Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4370:3892] 2026-01-10T12:22:52.532839Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4370:3892], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-10T12:22:52.533007Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4328:3866], server id = [2:4370:3892], tablet id = 72075186224037894, status = OK 2026-01-10T12:22:52.533121Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4371:3893] 2026-01-10T12:22:52.533203Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4371:3893], schemeshard id = 72075186224037897 2026-01-10T12:22:52.669851Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4376:3896] 2026-01-10T12:22:52.670898Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. 
ReplyToActorId [1:3164:3435] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-10T12:22:52.670969Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:40: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:3164:3435] 2026-01-10T12:22:52.671053Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-10T12:22:52.671318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:22:52.672188Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:22:52.676194Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:22:52.676315Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], Start read next stream part 2026-01-10T12:22:52.717457Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4395:3913]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:22:52.717811Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:22:52.717872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4395:3913], StatRequests.size() = 1 2026-01-10T12:23:54.318619Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxr1049r7t4e88svfh4x0f", SessionId: ydb://session/3?node_id=2&id=Mjg3Y2EwOTUtYjllN2EwMzYtYWJkZDNhYzYtNWU3YzFhNDY=, Slow query, duration: 61.638262s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:23:54.320344Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:23:54.320603Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], Start read next stream part 2026-01-10T12:23:54.321164Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-10T12:23:54.321304Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:54.324892Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:54.324974Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4379:3898], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmZjNTNkYS0xNjQ1N2NhNC1mODc0YzViMS1lMGJjMmYzNg==, TxId: 2026-01-10T12:23:54.325698Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:23:54.325822Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], Start read next stream part 2026-01-10T12:23:54.435481Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4520:4025]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:54.435816Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:54.435869Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4520:4025], StatRequests.size() = 1 2026-01-10T12:24:47.870688Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxsx6n4wvaf5nzfwaghq8n", SessionId: ydb://session/3?node_id=2&id=ZTMxNTMzYS04ZDEzOTQxZS04YjM0ODNjZC1hOTAwODZjMA==, Slow query, duration: 53.540559s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:24:47.872710Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:24:47.872844Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], Start read next stream part 2026-01-10T12:24:47.873027Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32240, txId: 18446744073709551615] shutting down 2026-01-10T12:24:47.873572Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:24:47.873616Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4377:3871], ActorId: [2:4501:4010], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=ZTI3NzQzZjgtNjU1YTdkY2QtODAwNmE4MzktNDVhZWUyNjk=, TxId: 2026-01-10T12:24:47.874329Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4614:4112], ActorId: [2:4616:4114], Starting query actor #1 [2:4617:4115] 2026-01-10T12:24:47.874383Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4616:4114], ActorId: [2:4617:4115], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:24:47.877589Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4616:4114], ActorId: [2:4617:4115], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmE5ZjM5MGEtYmEzMmVlNWQtOGFiZTgxOWItNDlkYzdiYTI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:24:47.936498Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4629:4126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:47.936726Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:24:47.936758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4629:4126], StatRequests.size() = 1 2026-01-10T12:24:48.085429Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4616:4114], ActorId: [2:4617:4115], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmE5ZjM5MGEtYmEzMmVlNWQtOGFiZTgxOWItNDlkYzdiYTI=, TxId: 2026-01-10T12:24:48.085534Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4616:4114], ActorId: [2:4617:4115], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmE5ZjM5MGEtYmEzMmVlNWQtOGFiZTgxOWItNDlkYzdiYTI=, TxId: 2026-01-10T12:24:48.085900Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4614:4112], ActorId: [2:4616:4114], Got response [2:4617:4115] SUCCESS 2026-01-10T12:24:48.086279Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:24:48.133655Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:24:48.133753Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3164:3435] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2026-01-10T12:23:46.109858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707590564375265:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.111013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.387318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.411507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.411682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.438558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.482025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.617023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.753032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.753061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.753068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.753160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.117245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.123190Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Triggering split by load 2026-01-10T12:23:48.996052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707599154311195:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.996170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.996442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707599154311205:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.996528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.277935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.422968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278670:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.423069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.423340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278672:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.423379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.454199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-10T12:23:49.535585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278777:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.535792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.535972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278803:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278804:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278806:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278808:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278810:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278812:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278811:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278817:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.536961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.537637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278837:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.537765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.538423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278850:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.538457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278842:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.538512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278876:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707603449278874:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.539996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.540259Z node 1 :KQP_WORKLOAD_SE ... than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-01-10T12:24:29.563379Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 3.1649 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] 2026-01-10T12:24:34.478755Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 3.4676 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-01-10T12:24:34.563705Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 3.1649 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] 2026-01-10T12:24:39.478779Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 3.4676 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-01-10T12:24:39.563767Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 3.1649 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] Table has 2 shards Fast forward > 10h to trigger the merge 2026-01-10T12:24:44.579650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:560: Propose merge request: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976710694 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037889 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2026-01-10T12:24:44.579862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-10T12:24:44.580548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000$\271\255\214\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000$\271\255\214\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-10T12:24:44.580595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:24:44.583197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: 
TCreateParts opId# 281474976710694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-10T12:24:44.587788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-10T12:24:44.587843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 2 -> 3 2026-01-10T12:24:44.590334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2026-01-10T12:24:44.593105Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7593707839672688089:8260] 2026-01-10T12:24:44.608027Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-10T12:24:44.608140Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-10T12:24:44.608249Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-10T12:24:44.612222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710694 TabletId: 72075186224037891 2026-01-10T12:24:44.612265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 3 -> 131 2026-01-10T12:24:44.613665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:44.625990Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-10T12:24:44.626091Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:24:44.626134Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:24:44.626162Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-10T12:24:44.626467Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-10T12:24:44.628711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037890 2026-01-10T12:24:44.628998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037889 2026-01-10T12:24:44.629257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 131 -> 132 2026-01-10T12:24:44.630532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:24:44.630795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:24:44.630879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:24:44.631792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710694 2026-01-10T12:24:44.631843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2026-01-10T12:24:44.631861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-10T12:24:44.636066Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:44.636066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-10T12:24:44.637367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-10T12:24:44.637385Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-10T12:24:44.637443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-10T12:24:44.637461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-01-10T12:24:44.637497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2026-01-10T12:24:44.638894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2026-01-10T12:24:44.638917Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:44.639157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:44.640492Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:24:44.640743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:24:44.642625Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: 
OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-10T12:24:44.643231Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:24:44.643547Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-10T12:24:44.643653Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-10T12:24:44.645322Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-10T12:24:44.646009Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:24:44.646064Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:24:44.646155Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:24:46.442852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2026-01-10T12:24:46.442933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2026-01-10T12:24:46.442973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-10T12:24:51.126864Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cbb79de3cd0]{reqId} Connect to grpc://localhost:17974 2026-01-10T12:24:51.129780Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbb79de3cd0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-10T12:24:51.145487Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cbb79de3cd0]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |94.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2026-01-10T12:24:51.107882Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb2991e4750] Connect to grpc://localhost:15315 2026-01-10T12:24:51.129078Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2991e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-01-10T12:24:51.144930Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb2991e4750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2026-01-10T12:24:51.145473Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2991e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2026-01-10T12:24:51.147767Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb2991e4750] Status 7 Permission 
Denied 2026-01-10T12:24:51.148281Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2991e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-01-10T12:24:51.149802Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb2991e4750] Status 7 Permission Denied 2026-01-10T12:24:51.150374Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb2991e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2026-01-10T12:24:51.151758Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb2991e4750] Status 7 Permission Denied |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 18211, MsgBus: 65304 2026-01-10T12:24:45.154629Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707844754784246:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:45.154730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:45.401302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:45.401433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:45.433852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:45.475483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:45.475626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:45.477423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707844754784218:2081] 1768047885153040 != 1768047885153043 2026-01-10T12:24:45.527635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:45.527678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:45.527693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:45.527852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:45.734514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:45.884090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:45.928037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.053352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.163304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:46.203787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:46.268727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:47.809266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707853344720679:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.809445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.809705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707853344720689:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:47.809781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.074822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.097116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.123404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.148175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.177156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.210828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.271175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.306308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:48.378571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707857639688855:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.378666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.378713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707857639688860:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.378853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707857639688862:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.378885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:48.382566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:48.393305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707857639688863:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:24:48.462347Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707857639688915:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:50.155032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707844754784246:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:50.155110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.784398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.862968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.870337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.870694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.870796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.143265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.143367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.148018Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887420364 != 1768047887420367 2026-01-10T12:24:50.160361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.202613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.299454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.594088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.608992Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.712852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:50.972312Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-10T12:24:50.972415Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-10T12:24:51.058485Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.085747s, errors=0 2026-01-10T12:24:51.058562Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> 
UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.523345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.647186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.655775Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.656096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.656205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:49.965912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:49.966053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:49.970508Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887325360 != 1768047887325363 2026-01-10T12:24:49.983373Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.030560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.123002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.418374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.432563Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.544127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:50.840055Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-10T12:24:50.840209Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-10T12:24:50.853575Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-10T12:24:50.853664Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:50.853725Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:50.853762Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:50.853842Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:50.853877Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:50.857439Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=N2E1ZmIzNjYtZjIyYzU3ODAtNjRjZTAyM2YtMWFmOTc2Zjc= 2026-01-10T12:24:50.861226Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=MjQ2OWM3ZWQtM2U2NjdlZjUtZTAxMzdhNTItNjliNTcwZTI= 2026-01-10T12:24:50.861286Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=OGQ3YzI5NzgtYmVjNTIwNjEtNDNlNzhlZGEtYzQxOTBlMjg= 2026-01-10T12:24:50.864786Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=ZWE1Y2NmY2MtNmI4MDA1MzktY2VjNjJjMzUtYWMzNzI0Y2I= 2026-01-10T12:24:50.864853Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=NDZkYjU2ZWYtMzJlZjAxZGYtNzQ1YTZhNDktOTU5MmM4YTc= 2026-01-10T12:24:50.878337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.878469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.878539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.878599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.878670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.878758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.879660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.883017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2863], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.883115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:50.891405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:50.947939Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:50.949301Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:50.950167Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:50.950934Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.069886Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.070040Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.070113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.070167Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.070236Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.104060Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:51.912668Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1768047891.912623s, errors=0 2026-01-10T12:24:51.912905Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1768047891912 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:51.926061Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1167:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:51.986069Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1768047891.986036s, errors=0 2026-01-10T12:24:51.986287Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1768047891986 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:51.999067Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1214:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.059755Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1260:3008] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.075050Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1768047892.075010s, errors=0 2026-01-10T12:24:52.075175Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1768047892075 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.118373Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1768047892.118340s, errors=0 2026-01-10T12:24:52.118590Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1768047892118 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.131507Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1307:3030] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.190259Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 4} finished in 1768047892.190224s, errors=0 2026-01-10T12:24:52.190495Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1768047892190 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.190556Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1.337205s, oks# 20, errors# 0 2026-01-10T12:24:52.190655Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.816663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.929950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.941771Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.942363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.942542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.238325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.238451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.243339Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303603 != 1768047887303606 2026-01-10T12:24:50.252402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.294652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.402383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.680016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.693716Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.796411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:51.085489Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-10T12:24:51.085613Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-10T12:24:51.090162Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-10T12:24:51.090250Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:51.090321Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:51.090365Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:51.090408Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:51.090454Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-10T12:24:51.101256Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=YmUyMTkxNWEtYjA1ODRmNzktMzA5NmZhMzAtODgyZDc5Yzk= 2026-01-10T12:24:51.105362Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=ZTc3NjZkMGUtZDg1ZDI1NzctYzVmNTgwYzctMmM3MGJkMDc= 2026-01-10T12:24:51.105437Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=MjkzNjhjOTUtOTZkNjBkZmQtYzA5NThhYWMtZTBlMWQ5Yw== 2026-01-10T12:24:51.109086Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=NjUxMjViZDktNzBmYTcwZGQtOTg0N2FkOTktYmMzYzJiNTk= 2026-01-10T12:24:51.109172Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=ZDFiNWE4Mi0yZmZmM2YyMS04NmI3Y2U2Yy00ZGVjMjE5Ng== 2026-01-10T12:24:51.114466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.114604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.114673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.114747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.114849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.114922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.115940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.117355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2863], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.117458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.124033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:51.181887Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.183249Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.184106Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.184800Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.303437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.303563Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.303635Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.303690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.303756Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.339126Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:51.912443Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1768047891.912392s, errors=0 2026-01-10T12:24:51.912788Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1768047891912 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:51.926751Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1167:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:51.990364Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1768047891.990322s, errors=0 2026-01-10T12:24:51.990663Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1768047891990 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.004916Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1214:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.065941Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1768047892.065900s, errors=0 2026-01-10T12:24:52.066092Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1768047892065 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.079204Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1261:3009] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.127767Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1297:3026] txid# 281474976715682, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.160419Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1768047892.160378s, errors=0 2026-01-10T12:24:52.160691Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1768047892160 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.189781Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 3} finished in 1768047892.189747s, errors=0 2026-01-10T12:24:52.189902Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1768047892189 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.189957Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1.100045s, oks# 20, errors# 0 2026-01-10T12:24:52.190207Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |94.3%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:50.409416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:50.490064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:50.496790Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:50.497169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:50.497259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.815686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.815837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.823062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303650 != 1768047887303653 2026-01-10T12:24:50.836662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.884519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.969343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:51.281460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:51.296305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:51.401541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:51.714884Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2026-01-10T12:24:51.715017Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2026-01-10T12:24:51.719629Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-10T12:24:51.719723Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-10T12:24:51.719780Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# 
{Tag: 0, parent: [1:953:2811], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-10T12:24:51.719826Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-10T12:24:51.719896Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-10T12:24:51.719933Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-10T12:24:51.723805Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=ZDFkOWQ3MDQtZGMwOGM5NmItZWNkNTRmYi1mMjE1MGNlNQ== 2026-01-10T12:24:51.727978Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=OWMwOGE3MzAtZWRjNmIxMDctMjkzNzRjZTMtM2VhMzRmZjM= 2026-01-10T12:24:51.728059Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=MTk1ZTM0YTgtOTI5NGM5NGMtYTk5NTI5NmQtM2M1ODJkNmQ= 2026-01-10T12:24:51.731937Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=NTkwZTE2MDktN2YyZTBkMWEtZjZmYzhkYWMtMzBmZTdmMDY= 2026-01-10T12:24:51.732012Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=MTFiMDk1MDAtZWJlY2Y0NmQtZjI3ZThmZi04ODU0NWQxNw== 2026-01-10T12:24:51.737138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.737293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.737365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.737427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.737489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.737579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.738523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.739878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2863], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.739971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:51.746400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:51.795642Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.797035Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.797923Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.798733Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:24:51.920240Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.920399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.920473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.920537Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.920629Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:24:51.956143Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.317426Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1146:2956] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.333593Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1162:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.400151Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1768047892.400060s, errors=0 2026-01-10T12:24:52.400431Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1768047892400 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.421082Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1768047892.421045s, errors=0 2026-01-10T12:24:52.421457Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1768047892421 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.421516Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1768047892.421489s, errors=0 2026-01-10T12:24:52.421592Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1768047892421 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.435594Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1254:3002] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.496301Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1768047892.496255s, errors=0 2026-01-10T12:24:52.496523Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1768047892496 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.509351Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1301:3024] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:52.569850Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 5} finished in 1768047892.569807s, errors=0 2026-01-10T12:24:52.570066Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1768047892569 OperationsOK: 4 OperationsError: 0 } 2026-01-10T12:24:52.570112Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 0.850814s, oks# 20, errors# 0 2026-01-10T12:24:52.570195Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |94.3%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TBlobStorageWardenTest::TestHttpMonPage >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases >> TBlobStorageWardenTest::TestDeleteStoragePool >> BindQueue::Basic >> TDistconfGenerateConfigTest::GenerateConfig3DCCases >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases [GOOD] >> TDistconfGenerateConfigTest::IgnoreNodes [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases [GOOD] >> TDistconfGenerateConfigTest::BadRack [GOOD] >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TDistconfGenerateConfigTest::UsedNodes [GOOD] >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 10 } Ring { Node: 11 } Ring { Node: 12 } Ring { Node: 13 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 12 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { 
Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] Test command err: Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } Ring { Node: 2 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 6 } Ring { Node: 11 } Ring { Node: 16 } Ring { Node: 21 } Ring { Node: 26 } Ring { Node: 31 } Ring { Node: 36 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 11 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } 
Ring { Node: 10 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] Test command err: Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 3 } Ring { Node: 5 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 19 } Ring { Node: 20 } Ring { Node: 21 } Ring { Node: 37 } Ring { Node: 38 } Ring { Node: 39 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 10 } Ring { Node: 19 } } } Expected: NToSelect: 3 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> BsControllerConfig::MergeBoxes [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> KqpPg::NoTableQuery+useSink >> KqpPg::InsertNoTargetColumns_Simple+useSink >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::InsertFromSelect_Simple+useSink >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> 
TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden >> KqpPg::ReadPgArray >> KqpPg::CreateTableSerialColumns+useSink >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.552332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.631005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.639364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.639694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.639771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:49.981236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:49.981370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:49.987459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303601 != 1768047887303604 2026-01-10T12:24:49.999645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.046335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.124756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.418241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.432387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.544125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:50.825288Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:50.825441Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-10T12:24:50.977760Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.151839s, errors=0 2026-01-10T12:24:50.977864Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:53.723205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:53.729799Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:53.730022Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:53.730063Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:53.917121Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:53.917219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:53.935767Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047891456950 != 1768047891456954 2026-01-10T12:24:53.938951Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:53.980691Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:54.050479Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:54.348933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:54.363035Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:54.464642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:54.670320Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:54.670418Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-10T12:24:54.749556Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.078862s, errors=0 2026-01-10T12:24:54.749649Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> 
TBlobStorageWardenTest::TestEvVGenerationChangeRace |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.444088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.584741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.603232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.603844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.603976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.015646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.015782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.022283Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303644 != 1768047887303647 2026-01-10T12:24:50.034899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.082595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.176497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.452064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.466942Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.571535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:50.851179Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:50.851349Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-10T12:24:50.912750Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.060970s, errors=0 2026-01-10T12:24:50.912864Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:54.043325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:54.051690Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:54.051947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:54.051992Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:54.242101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:54.242248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:54.274450Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047891487757 != 1768047891487761 2026-01-10T12:24:54.279701Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:54.327389Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:54.397344Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:54.703376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:54.717151Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:54.817393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:55.021259Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:55.021374Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-10T12:24:55.077331Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.055670s, errors=0 2026-01-10T12:24:55.077423Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:03.975093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:04.095752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:04.103810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:04.103956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:04.104016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:04.605302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:04.742583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:04.742726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.776928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:04.872032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:05.653677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:05.654271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:05.654318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:05.654366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:05.655005Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:05.721359Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:06.338079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.697177Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.705650Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-10T12:24:09.710071Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:09.748501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:09.748624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.793877Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:09.796512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.991223Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:09.991365Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.993631Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.994794Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.996028Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.996892Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.997136Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.997368Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.997489Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.997674Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.997883Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.014399Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:10.286429Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:10.317388Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:10.317500Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:10.356526Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:10.357182Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:10.357422Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:10.357492Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:10.357549Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:10.357614Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:10.357683Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:10.357740Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:10.359095Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:10.385624Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.385723Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2215:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.414621Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2271:2631] 2026-01-10T12:24:10.415127Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2271:2631], schemeshard id = 72075186224037897 2026-01-10T12:24:10.482290Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2335:2662] 2026-01-10T12:24:10.485131Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-10T12:24:10.494367Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Describe result: PathErrorUnknown 2026-01-10T12:24:10.494426Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Creating table 2026-01-10T12:24:10.494514Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-10T12:24:10.511267Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2404:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.515296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.523150Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.523315Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.538806Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:10.740441Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:10.767873Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-10T12:24:11.162124Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:11.255418Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:11.255507Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Column diff is empty, finishing 2026-01-10T12:24:11.913169Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWake ... d[ 22 ] 2026-01-10T12:24:41.003014Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:5802:3367], StatRequests.size() = 1 2026-01-10T12:24:41.923594Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:41.924343Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-10T12:24:41.924687Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:24:41.924756Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:24:41.960623Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:5843:3377]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:41.960911Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:24:41.960946Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:5843:3377], StatRequests.size() = 1 2026-01-10T12:24:43.037374Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:5886:3389]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:43.037651Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:24:43.037688Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:5886:3389], StatRequests.size() = 1 2026-01-10T12:24:44.137218Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:5929:3399]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:44.137547Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:24:44.137588Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5929:3399], StatRequests.size() = 1 2026-01-10T12:24:45.272226Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 
2026-01-10T12:24:45.461363Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:45.461467Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:45.461508Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:45.461551Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:45.520681Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5980:3414]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:45.521067Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:24:45.521115Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5980:3414], StatRequests.size() = 1 2026-01-10T12:24:46.672612Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6026:3424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:46.672924Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:24:46.672963Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6026:3424], StatRequests.size() = 1 2026-01-10T12:24:47.837444Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:47.838270Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-10T12:24:47.838893Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:24:47.838974Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:24:47.861519Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:47.861604Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:47.861915Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:47.877049Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:47.958661Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6068:3436]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:47.959032Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:24:47.959084Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6068:3436], StatRequests.size() = 1 2026-01-10T12:24:49.162628Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6114:3448]], StatType[ 0 
], StatRequestsCount[ 1 ] 2026-01-10T12:24:49.162970Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:24:49.163017Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6114:3448], StatRequests.size() = 1 2026-01-10T12:24:49.831891Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-10T12:24:49.831953Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-10T12:24:49.832325Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:49.846245Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-10T12:24:50.482490Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6155:3458]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:50.482841Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:24:50.482888Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6155:3458], StatRequests.size() = 1 2026-01-10T12:24:51.568244Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:51.706460Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6200:3470]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:51.706703Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:24:51.706754Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6200:3470], StatRequests.size() = 1 2026-01-10T12:24:52.825137Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6245:3482]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:52.825489Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:24:52.825535Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6245:3482], StatRequests.size() = 1 2026-01-10T12:24:53.824057Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:53.824275Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 7 2026-01-10T12:24:53.824718Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:53.824899Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:53.847256Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:53.847347Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:53.847593Z node 3 
:STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:53.862042Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:53.887675Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [3:6290:3494]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:53.888055Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:24:53.888151Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [3:6290:3494], StatRequests.size() = 1 2026-01-10T12:24:53.888756Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6292:3394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:53.893346Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:24:53.893451Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6302:3398] 2026-01-10T12:24:53.893531Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6302:3398] 2026-01-10T12:24:53.895072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6302:3398], server id = [2:6308:3399], tablet id = 72075186224038895, status = OK 2026-01-10T12:24:53.895155Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6308:3399] 2026-01-10T12:24:53.895449Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6308:3399], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:53.895521Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:24:53.895882Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:24:53.895973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6292:3394], StatRequests.size() = 1 2026-01-10T12:24:53.896062Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10916:2167] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10916:2167] Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11117:2156] recipient: [1:10916:2167] 2026-01-10T12:23:37.172405Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-10T12:23:37.173472Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-10T12:23:37.173778Z node 1 :BS_CONTROLLER DEBUG: 
{BSC22@console_interaction.cpp:14} Console interaction started 2026-01-10T12:23:37.175489Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:23:37.175862Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-10T12:23:37.176212Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:37.176233Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-10T12:23:37.176558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-10T12:23:37.183869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-10T12:23:37.183965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-10T12:23:37.184159Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-10T12:23:37.184261Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:37.184342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-10T12:23:37.184389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11138:2156] recipient: [1:110:2157] 2026-01-10T12:23:37.197112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-10T12:23:37.197243Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.247077Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-10T12:23:37.247227Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.247299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-10T12:23:37.247372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.247458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-10T12:23:37.247502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.247529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-10T12:23:37.247568Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.258274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-10T12:23:37.258409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.269318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-10T12:23:37.269498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-10T12:23:37.271052Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-10T12:23:37.271109Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-10T12:23:37.271368Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-10T12:23:37.271425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-10T12:23:37.284128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... } Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2026-01-10T12:24:45.107563Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2026-01-10T12:24:45.107592Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2026-01-10T12:24:45.107630Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2026-01-10T12:24:45.107657Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2026-01-10T12:24:45.107677Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2026-01-10T12:24:45.107698Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2026-01-10T12:24:45.107734Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2026-01-10T12:24:45.107763Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2026-01-10T12:24:45.107789Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2026-01-10T12:24:45.107811Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2026-01-10T12:24:45.107831Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2026-01-10T12:24:45.107851Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2026-01-10T12:24:45.107874Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2026-01-10T12:24:45.107911Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2026-01-10T12:24:45.107948Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2026-01-10T12:24:45.107990Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2026-01-10T12:24:45.108020Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2026-01-10T12:24:45.108048Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2026-01-10T12:24:45.108092Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 
2026-01-10T12:24:45.108123Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2026-01-10T12:24:45.108159Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2026-01-10T12:24:45.108195Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2026-01-10T12:24:45.108223Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2026-01-10T12:24:45.108242Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2026-01-10T12:24:45.108266Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2026-01-10T12:24:45.108306Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2026-01-10T12:24:45.108327Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2026-01-10T12:24:45.108345Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2026-01-10T12:24:45.108371Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2026-01-10T12:24:45.108399Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2026-01-10T12:24:45.108418Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2026-01-10T12:24:45.108453Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2026-01-10T12:24:45.108482Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2026-01-10T12:24:45.108501Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2026-01-10T12:24:45.108520Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2026-01-10T12:24:45.108567Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2026-01-10T12:24:45.108598Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2026-01-10T12:24:45.108632Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2026-01-10T12:24:45.108660Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2026-01-10T12:24:45.108681Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2026-01-10T12:24:45.108716Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2026-01-10T12:24:45.108758Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2026-01-10T12:24:45.108788Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 
2026-01-10T12:24:45.108815Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2026-01-10T12:24:45.108839Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2026-01-10T12:24:45.108860Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2026-01-10T12:24:45.108880Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2026-01-10T12:24:45.108931Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2026-01-10T12:24:45.108963Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2026-01-10T12:24:45.108995Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2026-01-10T12:24:45.109022Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2026-01-10T12:24:45.109052Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2026-01-10T12:24:45.109079Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2026-01-10T12:24:45.109105Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2026-01-10T12:24:45.109132Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2026-01-10T12:24:45.109169Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2026-01-10T12:24:45.109203Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2026-01-10T12:24:45.109238Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2026-01-10T12:24:45.109267Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2026-01-10T12:24:45.109285Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2026-01-10T12:24:45.109304Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2026-01-10T12:24:45.109330Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2026-01-10T12:24:45.109358Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2026-01-10T12:24:45.109389Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2026-01-10T12:24:45.109415Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2026-01-10T12:24:45.109458Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2026-01-10T12:24:45.109485Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 
2026-01-10T12:24:45.109515Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2026-01-10T12:24:45.109544Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2026-01-10T12:24:45.109582Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2026-01-10T12:24:45.109604Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2026-01-10T12:24:45.109624Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2026-01-10T12:24:45.109647Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2026-01-10T12:24:45.109677Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2026-01-10T12:24:45.109704Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2026-01-10T12:24:45.323340Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.217608s 2026-01-10T12:24:45.323538Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.217830s 2026-01-10T12:24:45.363793Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2026-01-10T12:24:45.384897Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:50.240878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:50.364956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:50.373531Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:50.373997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:50.374099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.676184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.676284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.680571Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303601 != 1768047887303604 2026-01-10T12:24:50.689394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.731605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.822582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:51.107056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:51.121126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:51.223224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:51.490646Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2026-01-10T12:24:51.490861Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-10T12:24:51.565577Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.074289s, errors=0 2026-01-10T12:24:51.565692Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:54.335013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:54.341725Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:54.341958Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:54.342001Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:54.512765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:54.512887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:54.539137Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047892002367 != 1768047892002371 2026-01-10T12:24:54.542328Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:54.583652Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:54.664303Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:54.976988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:54.990958Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:55.091974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:55.299050Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-10T12:24:55.299148Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-10T12:24:55.354872Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.055429s, errors=0 2026-01-10T12:24:55.354949Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.3%| [TA] 
$(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] Test command err: 2026-01-10T12:24:54.768168Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.769024Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.770354Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.772482Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.773290Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.773356Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/001501/r3tmp/tmpvuwGQB/pdisk_1.dat 2026-01-10T12:24:55.284572Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [abc2fc901918ac71] bootstrap ActorId# [1:554:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1343:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.284686Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1343:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284740Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1343:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284771Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# 
[72057594037932033:2:8:0:0:1343:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284791Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1343:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284809Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1343:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284837Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1343:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284868Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:1343:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.284935Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1343:1] Marker# BPG33 2026-01-10T12:24:55.284969Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1343:1] Marker# BPG32 2026-01-10T12:24:55.284997Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1343:2] Marker# BPG33 2026-01-10T12:24:55.285015Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1343:2] Marker# BPG32 2026-01-10T12:24:55.285035Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1343:3] Marker# BPG33 2026-01-10T12:24:55.285051Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1343:3] Marker# BPG32 2026-01-10T12:24:55.285171Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:67:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1343:3] FDS# 1343 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285218Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1343:2] FDS# 1343 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285263Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1343:1] FDS# 1343 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.286917Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1343:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90574 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-10T12:24:55.287081Z node 1 :BS_PROXY_PUT 
DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1343:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90574 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-10T12:24:55.287134Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1343:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90574 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-10T12:24:55.287185Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1343:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-10T12:24:55.287225Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1343:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:55.287376Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.832 sample PartId# [72057594037932033:2:8:0:0:1343:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.832 sample PartId# [72057594037932033:2:8:0:0:1343:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.833 sample PartId# [72057594037932033:2:8:0:0:1343:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.533 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.65 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.701 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-10T12:24:55.322704Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [a55b41de52eb2a08] bootstrap ActorId# [1:600:2506] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.322833Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.322864Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.322885Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.322920Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.322938Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 
2026-01-10T12:24:55.322956Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.322987Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [a55b41de52eb2a08] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.323034Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-10T12:24:55.323068Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-10T12:24:55.323099Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-10T12:24:55.323118Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-10T12:24:55.323141Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-10T12:24:55.323159Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-10T12:24:55.323300Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.323353Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.323381Z node 1 :BS_PROXY DEBUG: group_sessions.h:19 ... 
nkIdx# 0 SectorIdx# 252 PDiskId# 1002 2026-01-10T12:24:57.066150Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1002 2026-01-10T12:24:57.066204Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1002 2026-01-10T12:24:57.066242Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1002 2026-01-10T12:24:57.066280Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1002 2026-01-10T12:24:57.066318Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1002 2026-01-10T12:24:57.066352Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1002 2026-01-10T12:24:57.066391Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1002 2026-01-10T12:24:57.066438Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1002 2026-01-10T12:24:57.066477Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1002 2026-01-10T12:24:57.066518Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1002 2026-01-10T12:24:57.066554Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1002 2026-01-10T12:24:57.066596Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1002 2026-01-10T12:24:57.066652Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1002 2026-01-10T12:24:57.066720Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1002 2026-01-10T12:24:57.066774Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1002 2026-01-10T12:24:57.066822Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1002 2026-01-10T12:24:57.066857Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1002 2026-01-10T12:24:57.066918Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1002 2026-01-10T12:24:57.066953Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1002 2026-01-10T12:24:57.066993Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1002 2026-01-10T12:24:57.067044Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1002 2026-01-10T12:24:57.067108Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1002 2026-01-10T12:24:57.067188Z node 3 :BS_PDISK DEBUG: 
{BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D283510F480 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1002 2026-01-10T12:24:57.067273Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000004 PDiskId# 1002 2026-01-10T12:24:57.067362Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2565120001348 PDiskId# 1002 2026-01-10T12:24:57.067411Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1002 2026-01-10T12:24:57.067693Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-10T12:24:57.067784Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2383.408277 2026-01-10T12:24:57.068128Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1002 2026-01-10T12:24:57.068237Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-10T12:24:57.068278Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:57.068316Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.068383Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-10T12:24:57.068450Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000005 PDiskId# 1002 2026-01-10T12:24:57.068507Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2565120001604 PDiskId# 1002 2026-01-10T12:24:57.068578Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.068685Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2383.409456 2026-01-10T12:24:57.068767Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-10T12:24:57.068804Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:57.068835Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 
1002 2026-01-10T12:24:57.068870Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-10T12:24:57.068916Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.079090Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.089329Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.099576Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.109827Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.118987Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000006 PDiskId# 1002 2026-01-10T12:24:57.119070Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001848 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2383.460015 2026-01-10T12:24:57.119242Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-10T12:24:57.119329Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001848 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:57.119378Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.119493Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 12 NumActiveSlots: 0 SlotSizeInUnits: 2 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 0 } 2026-01-10T12:24:57.129729Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.139957Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 
1002 2026-01-10T12:24:57.150210Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.160452Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.170670Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.180893Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.191111Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.201364Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.211599Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-10T12:24:57.221845Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] Test command err: 2026-01-10T12:24:54.765356Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/001558/r3tmp/tmpUQw0iW/pdisk_1.dat 2026-01-10T12:24:54.814756Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.814838Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.817820Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.817867Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.820169Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.285109Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:488:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1350:0]] 
HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.285240Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285286Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285317Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285344Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285371Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285398Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.285436Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:1350:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.285502Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG33 2026-01-10T12:24:55.285548Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG32 2026-01-10T12:24:55.285590Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG33 2026-01-10T12:24:55.285618Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG32 2026-01-10T12:24:55.285649Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG33 2026-01-10T12:24:55.285676Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG32 2026-01-10T12:24:55.285835Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:48:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:3] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285910Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:41:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:2] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285966Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:62:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# 
[72057594037932033:2:8:0:0:1350:1] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.287593Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-10T12:24:55.287764Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-10T12:24:55.287852Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-10T12:24:55.287917Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-10T12:24:55.287977Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:55.288175Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.011 sample PartId# [72057594037932033:2:8:0:0:1350:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.012 sample PartId# [72057594037932033:2:8:0:0:1350:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.012 sample PartId# [72057594037932033:2:8:0:0:1350:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.675 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.814 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.899 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-10T12:24:55.316291Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [8d27cf9df52bfb78] bootstrap ActorId# [1:534:2504] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.316452Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316500Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316528Z node 1 
:BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316555Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316580Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316609Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.316653Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.316719Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-10T12:24:55.316764Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-10T12:24:55.316807Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-10T12:24:55.316834Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-10T12:24:55.316861Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-10T12:24:55.316885Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-10T12:24:55.317021Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:41:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.317086Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:62:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.317128Z node 1 :BS_PROXY DEBUG: group_sessions.h:1 ... 
0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-10T12:24:56.829673Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:606:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:56.832139Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [91379e686f748e92] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-10T12:24:56.832235Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-10T12:24:56.832284Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:56.832773Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:24:56.832812Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-10T12:24:56.832880Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2026-01-10T12:24:56.833157Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/d9xi/001558/r3tmp/tmphL1bpS//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-01-10T12:24:56.833862Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-10T12:24:56.833894Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:24:56.835724Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:619:2106] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.835856Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:620:2107] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.835940Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:621:2108] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.836045Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:622:2109] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.836181Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:623:2110] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.836297Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] Create Queue# [3:624:2111] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.836426Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:617:2105] 
Create Queue# [3:625:2112] targetNodeId# 2 Marker# DSP01 2026-01-10T12:24:56.836454Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:24:56.837888Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838135Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838214Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838327Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838366Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838422Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838473Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:56.838494Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-10T12:24:56.838527Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-10T12:24:56.838655Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [cd65997ea3b51537] bootstrap ActorId# [3:626:2113] Group# 
2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2026-01-10T12:24:56.838698Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [cd65997ea3b51537] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:2:0:0:0] node# 2 Marker# DSPB03 2026-01-10T12:24:56.838900Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:619:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 11230201027355802793 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2026-01-10T12:24:56.839949Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [cd65997ea3b51537] Handle TEvVBlockResult status# OK From# [82000002:2:0:0:0] NodeId# 2 Marker# DSPB01 2026-01-10T12:24:56.839998Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [cd65997ea3b51537] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2026-01-10T12:24:56.840301Z node 3 :BS_PROXY INFO: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-10T12:24:56.840478Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-10T12:24:56.840739Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [efc53170c63234c6] bootstrap ActorId# [2:627:2520] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:56.840845Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [efc53170c63234c6] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:56.840902Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [efc53170c63234c6] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:56.840967Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [efc53170c63234c6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2026-01-10T12:24:56.841004Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [efc53170c63234c6] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2026-01-10T12:24:56.841109Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:606:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:56.841295Z node 2 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:581: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-10T12:24:56.841512Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:265: [efc53170c63234c6] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-10T12:24:56.841585Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [efc53170c63234c6] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2026-01-10T12:24:56.841635Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:499: [efc53170c63234c6] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:56.841948Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:619:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:50.282928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:50.372874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:50.383022Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:50.383541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:50.383663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:50.713241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:50.713361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:50.719914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303624 != 1768047887303627 2026-01-10T12:24:50.732986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.779960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.864335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:51.154760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:51.168806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:51.270526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:51.540930Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2026-01-10T12:24:51.542843Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-10T12:24:51.590846Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.047611s, errors=0 2026-01-10T12:24:51.591472Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 
Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-10T12:24:51.591628Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:961:2819] with id# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-10T12:24:51.592913Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-10T12:24:51.593084Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:964:2822] 2026-01-10T12:24:51.593193Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-10T12:24:51.593237Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-10T12:24:51.593539Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:51.600260Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.006653s, read# 1000 2026-01-10T12:24:51.600698Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 } 2026-01-10T12:24:51.600874Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:967:2825] 2026-01-10T12:24:51.600932Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-10T12:24:51.600961Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-10T12:24:51.601233Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:51.834561Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} finished in 0.233271s, read# 1000 2026-01-10T12:24:51.834710Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 233 OperationsOK: 1000 OperationsError: 0 } 2026-01-10T12:24:51.834826Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:970:2828] 2026-01-10T12:24:51.834868Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-10T12:24:51.834896Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-10T12:24:51.835135Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:51.906194Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} finished in 0.071006s, read# 1000 2026-01-10T12:24:51.906339Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:970:2828] with chunkSize# 10 finished: 
0 { DurationMs: 71 OperationsOK: 1000 OperationsError: 0 } 2026-01-10T12:24:51.906480Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:973:2831] 2026-01-10T12:24:51.906533Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called, sample# 1000 2026-01-10T12:24:51.906563Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-10T12:24:51.906812Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:51.909727Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} finished in 0.002353s, sampled# 1000, iter finished# 1, oks# 1000 2026-01-10T12:24:51.909833Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 1000 2026-01-10T12:24:51.909994Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} started read actor with id# [1:976:2834] 2026-01-10T12:24:51.910049Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called, will read keys# 1000 2026-01-10T12:24:52.325839Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-10T12:24:52.326039Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 416 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 45\n" } 2026-01-10T12:24:52.326215Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} finished in 0.734411s with report: { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 233 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 71 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 416 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 45\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-10T12:24:52.326590Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:961:2819] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-10T12:24:54.954607Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:54.961684Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:54.961896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:54.961938Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:55.166431Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:55.166541Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:55.190968Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047892834336 != 1768047892834340 2026-01-10T12:24:55.194060Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:55.235856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:55.315095Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:55.579166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:55.592162Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:55.695257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:55.915034Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2026-01-10T12:24:55.915411Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-10T12:24:55.927751Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor finished in 0.012012s, errors=0 2026-01-10T12:24:55.928526Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 
Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-10T12:24:55.928652Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:961:2819] with id# {Tag: 0, parent: [2:952:2810], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-10T12:24:55.929990Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-10T12:24:55.930122Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:964:2822] 2026-01-10T12:24:55.930232Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-10T12:24:55.930285Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-10T12:24:55.930610Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:55.931610Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} finished in 0.000948s, read# 10 2026-01-10T12:24:55.931775Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-10T12:24:55.931895Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:967:2825] 2026-01-10T12:24:55.931950Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-10T12:24:55.931982Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-10T12:24:55.932223Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:55.934969Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 0.002704s, read# 10 2026-01-10T12:24:55.935083Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 2 OperationsOK: 10 OperationsError: 0 } 2026-01-10T12:24:55.935167Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:970:2828] 2026-01-10T12:24:55.935207Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-10T12:24:55.935251Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-10T12:24:55.935466Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:55.936156Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 0.000652s, read# 10 2026-01-10T12:24:55.936259Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:970:2828] with 
chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-10T12:24:55.936360Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:973:2831] 2026-01-10T12:24:55.936409Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Bootstrap called, sample# 10 2026-01-10T12:24:55.936436Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-10T12:24:55.936621Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:55.937087Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 0.000399s, sampled# 10, iter finished# 1, oks# 10 2026-01-10T12:24:55.937172Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received keyCount# 10 2026-01-10T12:24:55.937323Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} started read actor with id# [2:976:2834] 2026-01-10T12:24:55.937396Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:961:2819], subTag: 5} Bootstrap called, will read keys# 10 2026-01-10T12:24:56.295974Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-10T12:24:56.296194Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 358 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 23\n" } 2026-01-10T12:24:56.296350Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 0.367550s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 2 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 358 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 23\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-10T12:24:56.296447Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [GOOD] >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2026-01-10T12:24:54.767455Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.769157Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.769223Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.770166Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.771256Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.771306Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/0014d8/r3tmp/tmpxcrqoD/pdisk_1.dat 2026-01-10T12:24:55.284356Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [1:487:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1347:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.284502Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284534Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284555Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284573Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284591Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284609Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1347:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.284646Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1347:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.284696Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# 
ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1347:1] Marker# BPG33 2026-01-10T12:24:55.284730Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1347:1] Marker# BPG32 2026-01-10T12:24:55.284761Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1347:2] Marker# BPG33 2026-01-10T12:24:55.284783Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1347:2] Marker# BPG32 2026-01-10T12:24:55.284803Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1347:3] Marker# BPG33 2026-01-10T12:24:55.284820Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1347:3] Marker# BPG32 2026-01-10T12:24:55.284957Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:48:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1347:3] FDS# 1347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285004Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:41:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1347:2] FDS# 1347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.285035Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:62:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1347:1] FDS# 1347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.286892Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1347:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90606 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-10T12:24:55.287092Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1347:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90606 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-10T12:24:55.287178Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1347:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90606 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-10T12:24:55.287249Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-10T12:24:55.287311Z node 1 
:BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:55.287480Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.365 sample PartId# [72057594037932033:2:8:0:0:1347:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.366 sample PartId# [72057594037932033:2:8:0:0:1347:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.366 sample PartId# [72057594037932033:2:8:0:0:1347:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.261 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.435 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.516 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-10T12:24:55.301476Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2026-01-10T12:24:55.303669Z node 1 :BS_PROXY CRIT: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2026-01-10T12:24:55.303938Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2026-01-10T12:24:55.304052Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 2026-01-10T12:24:55.777312Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.778260Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.778316Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.778355Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.779209Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.779765Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/0014d8/r3tmp/tmpfYttaK/pdisk_1.dat 2026-01-10T12:24:56.239892Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [2:487:2466] G ... 
stablishingSessions Marker# DSP03 2026-01-10T12:24:57.474054Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474285Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474396Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474469Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474546Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474638Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474695Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.474735Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-10T12:24:57.474772Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-10T12:24:57.474814Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:340: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2026-01-10T12:24:57.475609Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [f913878b3da83702] bootstrap ActorId# [3:613:2517] 
Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:57.475743Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [f913878b3da83702] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:57.475787Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:57.475835Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2026-01-10T12:24:57.475866Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-10T12:24:57.475973Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:606:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:57.478921Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-10T12:24:57.479019Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-10T12:24:57.479068Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:57.479656Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:24:57.479704Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-10T12:24:57.479859Z node 4 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2026-01-10T12:24:57.480208Z node 4 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/d9xi/0014d8/r3tmp/tmpJVlyNp//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-01-10T12:24:57.481239Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-10T12:24:57.481285Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring 
Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:24:57.483559Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:617:2106] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.483737Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:618:2107] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.483879Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:619:2108] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.484010Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:620:2109] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.484158Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:621:2110] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.484282Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:622:2111] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.484400Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:615:2105] Create Queue# [4:623:2112] targetNodeId# 3 Marker# DSP01 2026-01-10T12:24:57.484427Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:24:57.486084Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486314Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486432Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486636Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486746Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486823Z 
node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486877Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-10T12:24:57.486918Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-10T12:24:57.486954Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-10T12:24:57.487131Z node 4 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [4:617:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] Test command err: 2026-01-10T12:24:54.771673Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.773219Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.773305Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.774361Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.775610Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.775670Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 
0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/001505/r3tmp/tmp8gBjib/pdisk_1.dat Formatting PDisk with guid1 8221990413538430769 Creating PDisk with guid2 18422072310453927247 Creating pdisk 2026-01-10T12:24:55.302175Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:570} PDiskId# 1001 Can't start due to a guid error expected# 18422072310453927247 on-disk# 8221990413538430769 PDiskId# 1001 2026-01-10T12:24:55.344270Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:488:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:347:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:24:55.344422Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344467Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344495Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344520Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344546Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344572Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:24:55.344608Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:347:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:24:55.344675Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:347:1] Marker# BPG33 2026-01-10T12:24:55.344720Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:347:1] Marker# BPG32 2026-01-10T12:24:55.344761Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:347:2] Marker# BPG33 2026-01-10T12:24:55.344789Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:347:2] Marker# BPG32 2026-01-10T12:24:55.344823Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:347:3] Marker# BPG33 2026-01-10T12:24:55.344848Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] 
Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:347:3] Marker# BPG32 2026-01-10T12:24:55.344985Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:48:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:3] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.345047Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:41:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:2] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.345093Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:62:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:1] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:24:55.346844Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-10T12:24:55.347092Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-10T12:24:55.347231Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-10T12:24:55.347305Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-10T12:24:55.347363Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:24:55.347549Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.014 sample PartId# [72057594037932033:2:8:0:0:347:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.015 sample PartId# [72057594037932033:2:8:0:0:347:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.015 sample PartId# [72057594037932033:2:8:0:0:347:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.797 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.018 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.153 
VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } Verify that PDisk returns ERROR YardInitResult: {EvYardInitResult Status# CORRUPTED ErrorReason# "PDisk is in StateError, reason# PDiskId# 1001 Can't start due to a guid error expected# 18422072310453927247 on-disk# 8221990413538430769" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 0 ownerRound# 0 SlotSizeInUnits# 0 ChunkSize# 0 AppendBlockSize# 0 RecommendedReadSize# 0 SeekTimeUs# 0 ReadSpeedBps# 0 WriteSpeedBps# 0 ReadBlockSize# 0 WriteBlockSize# 0 BulkWriteBlockSize# 0 PrefetchSizeBytes# 0 GlueRequestDistanceBytes# 0 IsTinyDisk# 0}} OwnedChunks# {}} 2026-01-10T12:24:56.466362Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:56.467724Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:56.467819Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:56.467889Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:56.469028Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:56.469829Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/001505/r3tmp/tmpmuUMQJ/pdisk_1.dat Starting test 2026-01-10T12:24:56.903818Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2537: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON START Marker# BSVS37 2026-01-10T12:24:56.904015Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:707: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery START 2026-01-10T12:24:56.906335Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:190: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-01-10T12:24:56.906957Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_logreplay.cpp:83: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TRecoveryLogReplayer: START 2026-01-10T12:24:56.907434Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:143: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery FINISHED: {RecoveryDuration# 0.002000s RecoveredLogStartLsn# 0 SuccessfulRecovery# true EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true 
EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {{RecsN# 0 Lsns# [0 0]}}} ... blocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON cookie 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } PDiskId: 1000 VDiskSlotId: 1002 Guid: 372111183072571215 Kind: 0 StoragePoolName: "testEvVGenerationChangeRace" InstanceGuid: 18411598814096661448 GroupSizeInUnits: 0 2026-01-10T12:24:56.962158Z node 2 :BS_SKELETON INFO: blobstorage_skeletonfront.cpp:1727: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) VDisk Generation Change success; new VDiskId# [82000002:2:0:0:0] Marker# BSVSF02 TEvControllerConfigResponse# NKikimrBlobStorage.TEvControllerConfigResponse Response { Status { Success: true } Success: true ConfigTxSeqNo: 5 } Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: Initial DiskSpace: Green Replicated: false UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 18411598814096661448 ReplicationProgress: nan ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 18411598814096661448 GroupSizeInUnits: 0 ... 
unblocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON 2026-01-10T12:24:56.962763Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1966: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON LOCAL RECOVERY SUCCEEDED Marker# BSVS29 2026-01-10T12:24:56.971628Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2134: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON SYNC GUID RECOVERY SUCCEEDED Marker# BSVS31 2026-01-10T12:24:56.971723Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1845: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON IS UP AND RUNNING Marker# BSVS28 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IncarnationGuid: 16349739802483488852 InstanceGuid: 18411598814096661448 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 1 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 17565690946311970933 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 136881804993758115 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 3 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 13997996214044682174 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 1709126996522288780 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 17565690946311970933 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 136881804993758115 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 13997996214044682174 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 1709126996522288780 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 
33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 17565690946311970933 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 136881804993758115 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 13997996214044682174 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 1709126996522288780 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 5246876549241837516 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 17992050148507617965 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 18411598814096661448 AvailableSize: 34225520640 GroupSizeInUnits: 2 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] Test command err: Disable nodeId# 3 Pick Enable nodeId# 3 Pick Pick Delete nodeId# 41 Add nodeId# 101 Delete nodeId# 77 Disable nodeId# 46 Delete nodeId# 80 Pick Pick Enable nodeId# 46 Disable nodeId# 34 Delete nodeId# 74 Add nodeId# 102 Disable nodeId# 97 Pick Pick Delete nodeId# 42 Pick Disable nodeId# 59 Pick Add nodeId# 103 Disable nodeId# 10 Disable nodeId# 47 Disable nodeId# 38 Pick Pick Disable nodeId# 16 Add nodeId# 104 Pick Delete nodeId# 43 Add nodeId# 105 Delete nodeId# 35 Enable nodeId# 38 Add nodeId# 106 Pick Disable nodeId# 90 Disable nodeId# 61 Delete nodeId# 83 Delete nodeId# 102 Disable nodeId# 3 Pick Disable nodeId# 11 Enable nodeId# 34 Disable nodeId# 88 Add nodeId# 107 Delete nodeId# 79 Enable nodeId# 3 Pick Enable nodeId# 16 Delete nodeId# 31 Disable nodeId# 78 Pick Enable nodeId# 88 Delete nodeId# 13 Disable nodeId# 73 Pick Delete nodeId# 11 Enable nodeId# 61 Add nodeId# 108 Enable nodeId# 59 Add nodeId# 109 Pick Delete nodeId# 86 Disable nodeId# 72 Disable nodeId# 38 Delete 
nodeId# 22 Add nodeId# 110 Delete nodeId# 39 Enable nodeId# 10 Add nodeId# 111 Add nodeId# 112 Add nodeId# 113 Delete nodeId# 112 Disable nodeId# 52 Delete nodeId# 6 Enable nodeId# 73 Delete nodeId# 4 Enable nodeId# 97 Pick Delete nodeId# 1 Delete nodeId# 66 Delete nodeId# 56 Disable nodeId# 63 Enable nodeId# 90 Add nodeId# 114 Add nodeId# 115 Disable nodeId# 113 Pick Enable nodeId# 52 Delete nodeId# 72 Enable nodeId# 47 Enable nodeId# 113 Add nodeId# 116 Pick Add nodeId# 117 Enable nodeId# 38 Disable nodeId# 59 Enable nodeId# 78 Add nodeId# 118 Delete nodeId# 91 Pick Add nodeId# 119 Enable nodeId# 63 Delete nodeId# 52 Pick Pick Add nodeId# 120 Disable nodeId# 71 Add nodeId# 121 Delete nodeId# 109 Delete nodeId# 65 Pick Disable nodeId# 69 Pick Enable nodeId# 71 Delete nodeId# 104 Disable nodeId# 96 Enable nodeId# 96 Enable nodeId# 69 Pick Add nodeId# 122 Disable nodeId# 12 Delete nodeId# 45 Add nodeId# 123 Pick Delete nodeId# 48 Enable nodeId# 12 Add nodeId# 124 Pick Disable nodeId# 71 Add nodeId# 125 Enable nodeId# 59 Delete nodeId# 115 Disable nodeId# 40 Pick Enable nodeId# 40 Disable nodeId# 3 Pick Delete nodeId# 125 Enable nodeId# 3 Add nodeId# 126 Disable nodeId# 58 Delete nodeId# 28 Pick Delete nodeId# 15 Enable nodeId# 58 Pick Delete nodeId# 88 Disable nodeId# 59 Disable nodeId# 69 Enable nodeId# 71 Enable nodeId# 69 Add nodeId# 127 Pick Enable nodeId# 59 Delete nodeId# 32 Disable nodeId# 76 Pick Disable nodeId# 126 Pick Delete nodeId# 119 Enable nodeId# 76 Pick Enable nodeId# 126 Pick Add nodeId# 128 Disable nodeId# 8 Disable nodeId# 69 Delete nodeId# 78 Pick Pick Disable nodeId# 18 Delete nodeId# 37 Delete nodeId# 5 Disable nodeId# 113 Pick Disable nodeId# 108 Pick Delete nodeId# 14 Delete nodeId# 87 Delete nodeId# 17 Pick Delete nodeId# 99 Disable nodeId# 7 Delete nodeId# 51 Add nodeId# 129 Delete nodeId# 44 Add nodeId# 130 Add nodeId# 131 Delete nodeId# 12 Add nodeId# 132 Pick Disable nodeId# 106 Pick Disable nodeId# 81 Add nodeId# 133 Add nodeId# 134 Add nodeId# 135 Disable nodeId# 100 Add nodeId# 136 Enable nodeId# 108 Enable nodeId# 81 Add nodeId# 137 Enable nodeId# 69 Add nodeId# 138 Delete nodeId# 20 Delete nodeId# 69 Disable nodeId# 135 Delete nodeId# 100 Pick Delete nodeId# 55 Enable nodeId# 113 Enable nodeId# 106 Add nodeId# 139 Delete nodeId# 18 Add nodeId# 140 Pick Enable nodeId# 135 Disable nodeId# 24 Disable nodeId# 50 Add nodeId# 141 Enable nodeId# 8 Delete nodeId# 140 Enable nodeId# 7 Delete nodeId# 33 Disable nodeId# 47 Pick Enable nodeId# 24 Delete nodeId# 138 Enable nodeId# 47 Enable nodeId# 50 Disable nodeId# 60 Pick Disable nodeId# 19 Pick Enable nodeId# 60 Pick Pick Delete nodeId# 8 Enable nodeId# 19 Delete nodeId# 73 Add nodeId# 142 Disable nodeId# 111 Add nodeId# 143 Add nodeId# 144 Enable nodeId# 111 Delete nodeId# 134 Add nodeId# 145 Disable nodeId# 21 Delete nodeId# 62 Add nodeId# 146 Enable nodeId# 21 Disable nodeId# 47 Disable nodeId# 113 Disable nodeId# 67 Enable nodeId# 113 Disable nodeId# 19 Add nodeId# 147 Add nodeId# 148 Pick Add nodeId# 149 Enable nodeId# 19 Pick Disable nodeId# 129 Add nodeId# 150 Delete nodeId# 84 Add nodeId# 151 Enable nodeId# 47 Add nodeId# 152 Enable nodeId# 67 Enable nodeId# 129 Pick Add nodeId# 153 Pick Pick Pick Delete nodeId# 61 Disable nodeId# 122 Delete nodeId# 129 Disable nodeId# 151 Delete nodeId# 130 Enable nodeId# 122 Enable nodeId# 151 Delete nodeId# 135 Add nodeId# 154 Pick Pick Delete nodeId# 147 Add nodeId# 155 Pick Add nodeId# 156 Pick Disable nodeId# 150 Add nodeId# 157 Pick Enable nodeId# 150 Delete nodeId# 
93 Disable nodeId# 96 Disable nodeId# 149 Add nodeId# 158 Delete nodeId# 133 Pick Pick Disable nodeId# 158 Pick Add nodeId# 159 Pick Disable nodeId# 106 Add nodeId# 160 Delete nodeId# 82 Delete nodeId# 148 Disable nodeId# 68 Delete nodeId# 27 Add nodeId# 161 Add nodeId# 162 Disable nodeId# 81 Pick Delete nodeId# 89 Delete nodeId# 94 Delete nodeId# 120 Enable nodeId# 68 Enable nodeId# 158 Disable nodeId# 57 Add nodeId# 163 Enable nodeId# 57 Add nodeId# 164 Disable nodeId# 34 Delete nodeId# 36 Disable nodeId# 139 Enable nodeId# 139 Add nodeId# 165 Delete nodeId# 107 Disable nodeId# 49 Enable nodeId# 81 Enable nodeId# 34 Add nodeId# 166 Add nodeId# 167 Delete nodeId# 131 Enable nodeId# 96 Pick Enable nodeId# 106 Pick Add nodeId# 168 Add nodeId# 169 Add nodeId# 170 Pick Disable nodeId# 76 Add nodeId# 171 Delete nodeId# 141 Add nodeId# 172 Delete nodeId# 46 Add nodeId# 173 Enable nodeId# 49 Delete nodeId# 122 Pick Pick Pick Delete nodeId# 113 Disable nodeId# 16 Delete nodeId# 97 Disable nodeId# 155 Add nodeId# 174 Pick Delete nodeId# 150 Add nodeId# 175 Disable nodeId# 67 Enable nodeId# 155 Enable nodeId# 76 Delete nodeId# 146 Add nodeId# 176 Disable nodeId# 76 Pick Add nodeId# 177 Disable nodeId# 166 Enable nodeId# 166 Enable nodeId# 16 Delete nodeId# 85 Enable nodeId# 149 Disable nodeId# 157 Delete nodeId# 76 Pick Disable nodeId# 96 Add nodeId# 178 Add nodeId# 179 Delete nodeId# 3 Disable nodeId# 153 Disable nodeId# 19 Enable nodeId# 96 Enable nodeId# 157 Enable nodeId# 67 Delete nodeId# 29 Disable nodeId# 71 Enable nodeId# 153 Add nodeId# 180 Enable nodeId# 71 Enable nodeId# 19 Disable nodeId# 155 Add nodeId# 181 Disable nodeId# 25 Pick Delete nodeId# 59 Add nodeId# 182 Enable nodeId# 25 Disable nodeId# 124 Disable nodeId# 159 Enable nodeId# 159 Disable nodeId# 70 Pick Disable nodeId# 54 Pick Disable nodeId# 137 Enable nodeId# 70 Delete nodeId# 49 Pick Disable nodeId# 60 Disable nodeId# 180 Enable nodeId# 124 Delete nodeId# 181 Pick Pick Pick Pick Disable nodeId# 175 Add nodeId# 183 Delete nodeId# 106 Enable nodeId# 54 Add nodeId# 184 Pick Disable nodeId# 157 Add nodeId# 185 Delete nodeId# 156 Delete nodeId# 137 Pick Add nodeId# 186 Enable nodeId# 155 Delete nodeId# 163 Pick Add nodeId# 187 Delete nodeId# 103 Enable nodeId# 180 Pick Enable nodeId# 175 Add nodeId# 188 Add nodeId# 189 Disable nodeId# 67 Delete nodeId# 40 Disable nodeId# 110 Delete nodeId# 54 Enable nodeId# 67 Disable nodeId# 128 Delete nodeId# 152 Disable nodeId# 57 Disable nodeId# 158 Pick Pick Disable nodeId# 186 Pick Disable nodeId# 90 Delete nodeId# 136 Add nodeId# 190 Add nodeId# 191 Pick Disable nodeId# 95 Pick Add nodeId# 192 Enable nodeId# 95 Delete nodeId# 30 Enable nodeId# 60 Pick Disable nodeId# 142 Enable nodeId# 90 Disable nodeId# 144 Disable nodeId# 63 Disable nodeId# 26 Disable nodeId# 161 Disable nodeId# 16 Enable nodeId# 16 Delete nodeId# 58 Pick Delete nodeId# 21 Disable nodeId# 34 Delete nodeId# 189 Disable nodeId# 127 Add nodeId# 193 Enable nodeId# 57 Pick Enable nodeId# 128 Delete nodeId# 149 Disable nodeId# 108 Disable nodeId# 192 Add nodeId# 194 Delete nodeId# 110 Disable nodeId# 183 Pick Disable nodeId# 132 Pick Disable nodeId# 143 Enable nodeId# 158 Enable nodeId# 186 Delete nodeId# 186 Enable nodeId# 132 Disable nodeId# 121 Pick Delete nodeId# 176 Add nodeId# 195 Disable nodeId# 114 Delete nodeId# 185 Pick Disable nodeId# 23 Enable nodeId# 143 Enable nodeId# 108 Delete nodeId# 175 Enable nodeId# 142 Delete nodeId# 128 Enable nodeId# 192 Pick Add nodeId# 196 Delete nodeId# 118 Pick Disable nodeId# 190 
Pick Pick Delete nodeId# 98 Enable nodeId# 26 Delete nodeId# 16 Enable nodeId# 190 Delete nodeId# 117 Enable nodeId# 121 Disable nodeId# 68 Delete nodeId# 7 Pick Pick Add nodeId# 197 Pick Pick Delete nodeId# 101 Disable nodeId# 10 Enable nodeId# 63 Disable nodeId# 196 Disable nodeId# 177 Pick Disable nodeId# 187 Pick Disable nodeId# 75 Enable nodeId# 196 Disable nodeId# 67 Enable nodeId# 75 Enable nodeId# 157 Enable nodeId# 67 Delete nodeId# 169 Delete nodeId# 194 Add nodeId# 198 Disable nodeId# 180 Delete nodeId# 81 Disable nodeId# 145 Add nodeId# 199 Delete nodeId# 127 Disable nodeId# 158 Add nodeId# 200 Pick Add nodeId# 201 Add nodeId# 202 Disable nodeId# 53 Delete nodeId# 183 Pick Disable nodeId# 90 Disable nodeId# 197 Disable nodeId# 2 Disable nodeId# 67 Disable nodeId# 192 Enable nodeId# 53 Pick Enable nodeId# 144 Enable nodeId# 10 Pick Disable nodeId# 196 Disable nodeId# 170 Enable nodeId# 170 Pick Enable nodeId# 177 Disable nodeId# 160 Delete nodeId# 161 Delete nodeId# 124 Pick Enable nodeId# 187 Disable nodeId# 75 Pick Enable nodeId# 160 Pick Disable nodeId# 182 Disable nodeId# 177 Delete nodeId# 92 Delete nodeId# 160 Enable nodeId# 180 Delete nodeId# 153 Enable nodeId# 68 Add nodeId# 203 Delete nodeId# 57 Disable nodeId# 200 Pick Disable nodeId# 162 Disable nodeId# 9 Disable nodeId# 70 Delete nodeId# 159 Add nodeId# 204 Enable nodeId# 34 Pick Enable nodeId# 192 Enable nodeId# 177 Delete nodeId# 162 Add nodeId# 205 Delete nodeId# 67 Delete nodeId# 200 Enable nodeId# 182 Disable nodeId# 10 Pick Add nodeId# 206 Enable nodeId# 114 Add nodeId# 207 Enable nodeId# 90 Pick Add nodeId# 208 Add nodeId# 209 Disable nodeId# 126 Disable nodeId# 172 Add nodeId# 210 Pick Pick Enable nodeId# 10 Add nodeId# 211 Enable nodeId# 23 Pick Delete nodeId# 202 Delete nodeId# 75 Add nodeId# 212 Delete nodeId# 190 Add nodeId# 213 Add nodeId# 214 Delete nodeId# 25 Enable nodeId# 145 Add nodeId# 215 Disable nodeId# 105 Add nodeId# 216 Add nodeId# 217 Enable nodeId# 105 Enable nodeId# 126 Add nodeId# 218 Disable nodeId# 188 Disable nodeId# 209 Delete nodeId# 213 Delete nodeId# 178 Pick Pick Enable nodeId# 9 Disable nodeId# 64 Enable nodeId# 158 Pick Enable nodeId# 2 Add nodeId# 219 Enable nodeId# 70 Pick Add nodeId# 220 Pick Disable nodeId# 155 Pick Add nodeId# 221 Add nodeId# 222 Enable nodeId# 197 Disable nodeId# 201 Add nodeId# 223 Enable nodeId# 188 Add nodeId# 224 Disable nodeId# 10 Enable nodeId# 209 Disable nodeId# 210 Add nodeId# 225 Delete nodeId# 219 Add nodeId# 226 Add nodeId# 227 Add nodeId# 228 Enable nodeId# 10 Delete nodeId# 218 Delete nodeId# 139 Pick Enable nodeId# 201 Disable nodeId# 223 Disable nodeId# 173 Pick Disable nodeId# 216 Delete nodeId# 184 Delete nodeId# 228 Enable nodeId# 172 Pick Delete nodeId# 187 Disable nodeId# 68 Pick Enable nodeId# 196 Pick Disable nodeId# 105 Delete nodeId# 193 Disable nodeId# 195 Delete nodeId# 195 Pick Add nodeId# 229 Disable nodeId# 71 Add nodeId# 230 Delete nodeId# 198 Add nodeId# 231 Enable nodeId# 68 Enable nodeId# 173 Enable nodeId# 216 Pick Enable nodeId# 64 Enable nodeId# 155 Enable nodeId# 210 Pick Disable nodeId# 53 Delete nodeId# 71 Delete nodeId# 164 Add nodeId# 232 Pick Disable nodeId# 204 Pick Delete nodeId# 126 Pick Add nodeId# 233 Disable nodeId# 210 Add nodeId# 234 Enable nodeId# 223 Delete nodeId# 211 Enable nodeId# 105 Add nodeId# 235 Disable nodeId# 70 Add nodeId# 236 Disable nodeId# 151 Disable nodeId# 9 Disable nodeId# 225 Delete nodeId# 173 Disable nodeId# 179 Delete nodeId# 155 Add nodeId# 237 Delete nodeId# 188 Delete nodeId# 53 
Delete nodeId# 196 Enable nodeId# 210 Enable nodeId# 70 Disable nodeId# 212 Enable nodeId# 151 Pick Add nodeId# 238 Delete nodeId# 226 Disable nodeId# 216 Disable nodeId# 165 Pick Disable nodeId# 199 Pick Add nodeId# 239 Add nodeId# 240 Pick Enable nodeId# 199 Pick Enable nodeId# 179 Enable nodeId# 212 Delete nodeId# 26 Disable nodeId# 231 Ena ... odeId# 20244 Pick Disable nodeId# 20298 Delete nodeId# 20259 Pick Add nodeId# 20307 Delete nodeId# 20271 Add nodeId# 20308 Add nodeId# 20309 Delete nodeId# 20270 Add nodeId# 20310 Add nodeId# 20311 Disable nodeId# 20297 Add nodeId# 20312 Delete nodeId# 20312 Pick Enable nodeId# 20298 Pick Disable nodeId# 20307 Add nodeId# 20313 Delete nodeId# 20291 Disable nodeId# 20313 Pick Enable nodeId# 20258 Delete nodeId# 20283 Enable nodeId# 20296 Add nodeId# 20314 Delete nodeId# 20261 Pick Delete nodeId# 20296 Delete nodeId# 20311 Enable nodeId# 20307 Pick Disable nodeId# 20314 Disable nodeId# 20308 Enable nodeId# 20285 Disable nodeId# 20310 Delete nodeId# 20309 Enable nodeId# 20297 Disable nodeId# 20302 Enable nodeId# 20310 Enable nodeId# 20313 Disable nodeId# 20298 Disable nodeId# 20310 Add nodeId# 20315 Pick Add nodeId# 20316 Add nodeId# 20317 Delete nodeId# 20315 Pick Pick Delete nodeId# 20258 Disable nodeId# 20297 Enable nodeId# 20310 Enable nodeId# 20297 Enable nodeId# 20308 Pick Add nodeId# 20318 Enable nodeId# 20301 Delete nodeId# 20317 Pick Enable nodeId# 20302 Disable nodeId# 20316 Pick Disable nodeId# 20281 Disable nodeId# 20313 Pick Disable nodeId# 20308 Add nodeId# 20319 Enable nodeId# 20262 Enable nodeId# 20305 Delete nodeId# 20262 Disable nodeId# 20307 Disable nodeId# 20306 Delete nodeId# 20314 Enable nodeId# 20298 Enable nodeId# 20299 Pick Enable nodeId# 20306 Enable nodeId# 20281 Pick Disable nodeId# 20301 Disable nodeId# 20299 Delete nodeId# 20281 Pick Enable nodeId# 20299 Add nodeId# 20320 Add nodeId# 20321 Add nodeId# 20322 Delete nodeId# 20313 Pick Add nodeId# 20323 Pick Disable nodeId# 20275 Pick Delete nodeId# 20298 Add nodeId# 20324 Enable nodeId# 20308 Pick Disable nodeId# 20305 Delete nodeId# 20318 Add nodeId# 20325 Pick Disable nodeId# 20299 Add nodeId# 20326 Delete nodeId# 20319 Enable nodeId# 20301 Enable nodeId# 20316 Disable nodeId# 20323 Add nodeId# 20327 Disable nodeId# 20308 Disable nodeId# 20285 Pick Add nodeId# 20328 Disable nodeId# 20327 Disable nodeId# 20322 Delete nodeId# 20307 Add nodeId# 20329 Pick Add nodeId# 20330 Add nodeId# 20331 Delete nodeId# 20310 Pick Add nodeId# 20332 Disable nodeId# 20316 Add nodeId# 20333 Delete nodeId# 20285 Pick Enable nodeId# 20322 Add nodeId# 20334 Enable nodeId# 20323 Pick Enable nodeId# 20305 Delete nodeId# 20306 Pick Add nodeId# 20335 Pick Enable nodeId# 20316 Pick Disable nodeId# 20301 Disable nodeId# 20321 Enable nodeId# 20301 Enable nodeId# 20299 Add nodeId# 20336 Delete nodeId# 20329 Delete nodeId# 20300 Disable nodeId# 20333 Enable nodeId# 20333 Enable nodeId# 20308 Enable nodeId# 20327 Pick Add nodeId# 20337 Pick Delete nodeId# 20322 Enable nodeId# 20275 Enable nodeId# 20321 Pick Pick Add nodeId# 20338 Delete nodeId# 20337 Pick Pick Add nodeId# 20339 Pick Disable nodeId# 20297 Enable nodeId# 20297 Disable nodeId# 20331 Disable nodeId# 20327 Pick Add nodeId# 20340 Enable nodeId# 20331 Add nodeId# 20341 Pick Delete nodeId# 20301 Enable nodeId# 20327 Add nodeId# 20342 Pick Add nodeId# 20343 Pick Pick Add nodeId# 20344 Delete nodeId# 20342 Add nodeId# 20345 Add nodeId# 20346 Add nodeId# 20347 Delete nodeId# 20321 Pick Pick Pick Add nodeId# 20348 Pick Pick Add nodeId# 20349 Delete 
nodeId# 20305 Pick Delete nodeId# 20336 Add nodeId# 20350 Add nodeId# 20351 Delete nodeId# 20299 Disable nodeId# 20334 Disable nodeId# 20327 Pick Disable nodeId# 20340 Disable nodeId# 20344 Pick Delete nodeId# 20275 Add nodeId# 20352 Disable nodeId# 20352 Enable nodeId# 20334 Add nodeId# 20353 Delete nodeId# 20345 Pick Delete nodeId# 20347 Pick Add nodeId# 20354 Disable nodeId# 20334 Enable nodeId# 20327 Pick Enable nodeId# 20344 Add nodeId# 20355 Pick Add nodeId# 20356 Delete nodeId# 20350 Delete nodeId# 20330 Delete nodeId# 20323 Enable nodeId# 20334 Disable nodeId# 20344 Pick Disable nodeId# 20354 Add nodeId# 20357 Enable nodeId# 20344 Pick Pick Disable nodeId# 20353 Disable nodeId# 20326 Add nodeId# 20358 Disable nodeId# 20349 Pick Add nodeId# 20359 Delete nodeId# 20341 Add nodeId# 20360 Enable nodeId# 20352 Delete nodeId# 20325 Pick Disable nodeId# 20352 Delete nodeId# 20348 Enable nodeId# 20352 Enable nodeId# 20326 Enable nodeId# 20340 Disable nodeId# 20338 Enable nodeId# 20349 Enable nodeId# 20338 Enable nodeId# 20354 Pick Delete nodeId# 20328 Enable nodeId# 20353 Pick Disable nodeId# 20324 Enable nodeId# 20324 Add nodeId# 20361 Pick Disable nodeId# 20324 Pick Enable nodeId# 20324 Delete nodeId# 20355 Delete nodeId# 20357 Add nodeId# 20362 Disable nodeId# 20344 Delete nodeId# 20344 Add nodeId# 20363 Add nodeId# 20364 Add nodeId# 20365 Add nodeId# 20366 Disable nodeId# 20343 Enable nodeId# 20343 Disable nodeId# 20361 Add nodeId# 20367 Add nodeId# 20368 Delete nodeId# 20359 Pick Enable nodeId# 20361 Add nodeId# 20369 Pick Add nodeId# 20370 Pick Pick Add nodeId# 20371 Pick Delete nodeId# 20340 Disable nodeId# 20327 Disable nodeId# 20335 Enable nodeId# 20327 Enable nodeId# 20335 Add nodeId# 20372 Disable nodeId# 20368 Add nodeId# 20373 Enable nodeId# 20368 Pick Delete nodeId# 20334 Pick Delete nodeId# 20335 Delete nodeId# 20371 Delete nodeId# 20352 Add nodeId# 20374 Pick Disable nodeId# 20327 Disable nodeId# 20373 Enable nodeId# 20373 Enable nodeId# 20327 Disable nodeId# 20338 Enable nodeId# 20338 Add nodeId# 20375 Delete nodeId# 20331 Disable nodeId# 20332 Enable nodeId# 20332 Pick Add nodeId# 20376 Delete nodeId# 20356 Add nodeId# 20377 Add nodeId# 20378 Add nodeId# 20379 Add nodeId# 20380 Add nodeId# 20381 Delete nodeId# 20367 Delete nodeId# 20377 Pick Disable nodeId# 20332 Disable nodeId# 20381 Disable nodeId# 20333 Delete nodeId# 20333 Delete nodeId# 20370 Delete nodeId# 20332 Enable nodeId# 20381 Pick Pick Delete nodeId# 20358 Disable nodeId# 20338 Pick Add nodeId# 20382 Enable nodeId# 20338 Pick Pick Disable nodeId# 20346 Enable nodeId# 20346 Add nodeId# 20383 Add nodeId# 20384 Delete nodeId# 20353 Pick Add nodeId# 20385 Delete nodeId# 20316 Add nodeId# 20386 Pick Delete nodeId# 20324 Add nodeId# 20387 Pick Add nodeId# 20388 Delete nodeId# 20369 Add nodeId# 20389 Add nodeId# 20390 Delete nodeId# 20378 Pick Disable nodeId# 20297 Enable nodeId# 20297 Add nodeId# 20391 Add nodeId# 20392 Add nodeId# 20393 Add nodeId# 20394 Add nodeId# 20395 Pick Add nodeId# 20396 Delete nodeId# 20364 Disable nodeId# 20382 Add nodeId# 20397 Enable nodeId# 20382 Add nodeId# 20398 Add nodeId# 20399 Delete nodeId# 20373 Disable nodeId# 20343 Disable nodeId# 20387 Pick Enable nodeId# 20343 Enable nodeId# 20387 Pick Add nodeId# 20400 Pick Pick Add nodeId# 20401 Add nodeId# 20402 Add nodeId# 20403 Add nodeId# 20404 Add nodeId# 20405 Add nodeId# 20406 Disable nodeId# 20391 Enable nodeId# 20391 Add nodeId# 20407 Delete nodeId# 20402 Add nodeId# 20408 Disable nodeId# 20339 Add nodeId# 20409 Add nodeId# 20410 
Disable nodeId# 20382 Pick Delete nodeId# 20390 Enable nodeId# 20339 Add nodeId# 20411 Delete nodeId# 20392 Enable nodeId# 20382 Add nodeId# 20412 Pick Add nodeId# 20413 Disable nodeId# 20408 Pick Add nodeId# 20414 Delete nodeId# 20327 Enable nodeId# 20408 Disable nodeId# 20384 Delete nodeId# 20388 Enable nodeId# 20384 Disable nodeId# 20366 Add nodeId# 20415 Disable nodeId# 20396 Disable nodeId# 20360 Enable nodeId# 20360 Delete nodeId# 20308 Delete nodeId# 20363 Enable nodeId# 20396 Delete nodeId# 20382 Pick Add nodeId# 20416 Disable nodeId# 20404 Enable nodeId# 20366 Add nodeId# 20417 Enable nodeId# 20404 Add nodeId# 20418 Disable nodeId# 20391 Pick Disable nodeId# 20297 Delete nodeId# 20406 Enable nodeId# 20391 Delete nodeId# 20360 Delete nodeId# 20396 Delete nodeId# 20339 Add nodeId# 20419 Delete nodeId# 20297 Pick Disable nodeId# 20419 Delete nodeId# 20385 Pick Pick Add nodeId# 20420 Pick Add nodeId# 20421 Pick Enable nodeId# 20419 Pick Delete nodeId# 20405 Delete nodeId# 20408 Add nodeId# 20422 Pick Pick Disable nodeId# 20401 Delete nodeId# 20395 Add nodeId# 20423 Enable nodeId# 20401 Delete nodeId# 20412 Pick Pick Delete nodeId# 20417 Delete nodeId# 20376 Pick Disable nodeId# 20400 Enable nodeId# 20400 Pick Disable nodeId# 20380 Add nodeId# 20424 Disable nodeId# 20389 Delete nodeId# 20326 Add nodeId# 20425 Delete nodeId# 20374 Add nodeId# 20426 Delete nodeId# 20366 Pick Disable nodeId# 20354 Enable nodeId# 20380 Disable nodeId# 20420 Pick Add nodeId# 20427 Enable nodeId# 20389 Disable nodeId# 20391 Add nodeId# 20428 Delete nodeId# 20393 Disable nodeId# 20404 Disable nodeId# 20400 Add nodeId# 20429 Delete nodeId# 20422 Add nodeId# 20430 Pick Pick Enable nodeId# 20404 Add nodeId# 20431 Enable nodeId# 20420 Delete nodeId# 20394 Enable nodeId# 20354 Add nodeId# 20432 Pick Pick Enable nodeId# 20400 Pick Disable nodeId# 20354 Add nodeId# 20433 Add nodeId# 20434 Enable nodeId# 20354 Pick Add nodeId# 20435 Pick Add nodeId# 20436 Delete nodeId# 20407 Pick Pick Pick Pick Pick Pick Enable nodeId# 20391 Pick Add nodeId# 20437 Add nodeId# 20438 Pick Disable nodeId# 20362 Pick Add nodeId# 20439 Enable nodeId# 20362 Delete nodeId# 20428 Add nodeId# 20440 Pick Pick Add nodeId# 20441 Pick Delete nodeId# 20320 Pick Add nodeId# 20442 Disable nodeId# 20362 Disable nodeId# 20338 Add nodeId# 20443 Enable nodeId# 20362 Disable nodeId# 20424 Add nodeId# 20444 Add nodeId# 20445 Disable nodeId# 20432 Pick Delete nodeId# 20436 Delete nodeId# 20302 Delete nodeId# 20423 Pick Pick Enable nodeId# 20432 Enable nodeId# 20424 Pick Enable nodeId# 20338 Add nodeId# 20446 Add nodeId# 20447 Pick Pick Pick Delete nodeId# 20416 Delete nodeId# 20400 Disable nodeId# 20410 Disable nodeId# 20445 Delete nodeId# 20410 Enable nodeId# 20445 Add nodeId# 20448 Disable nodeId# 20401 Pick Pick Delete nodeId# 20361 Pick Delete nodeId# 20409 Add nodeId# 20449 Delete nodeId# 20444 Delete nodeId# 20403 Disable nodeId# 20441 Pick Disable nodeId# 20380 Add nodeId# 20450 Enable nodeId# 20380 Enable nodeId# 20401 Delete nodeId# 20427 Delete nodeId# 20448 Add nodeId# 20451 Pick Pick Pick Disable nodeId# 20437 Enable nodeId# 20441 Delete nodeId# 20368 Delete nodeId# 20398 Pick Disable nodeId# 20442 Disable nodeId# 20386 Disable nodeId# 20426 Enable nodeId# 20442 Disable nodeId# 20450 Pick Pick Add nodeId# 20452 Delete nodeId# 20435 Enable nodeId# 20426 Enable nodeId# 20386 Disable nodeId# 20451 Delete nodeId# 20439 Enable nodeId# 20451 Pick Add nodeId# 20453 Delete nodeId# 20375 Enable nodeId# 20450 Disable nodeId# 20432 Add nodeId# 20454 
Delete nodeId# 20440 Delete nodeId# 20449 Pick Pick Enable nodeId# 20432 Disable nodeId# 20380 Disable nodeId# 20421 Disable nodeId# 20397 Disable nodeId# 20383 Add nodeId# 20455 Add nodeId# 20456 Delete nodeId# 20456 Enable nodeId# 20380 Delete nodeId# 20431 Pick Add nodeId# 20457 Pick Add nodeId# 20458 Disable nodeId# 20386 Delete nodeId# 20415 Enable nodeId# 20437 Disable nodeId# 20442 Delete nodeId# 20455 Pick Pick Disable nodeId# 20362 Disable nodeId# 20452 Enable nodeId# 20397 Pick Delete nodeId# 20419 Disable nodeId# 20454 Pick Enable nodeId# 20362 Add nodeId# 20459 Pick Disable nodeId# 20343 Delete nodeId# 20457 Enable nodeId# 20452 Delete nodeId# 20338 Delete nodeId# 20433 Pick Disable nodeId# 20397 Pick Pick Pick Pick Add nodeId# 20460 Disable nodeId# 20458 Pick Pick Pick Delete nodeId# 20387 Pick Delete nodeId# 20453 Disable nodeId# 20434 Add nodeId# 20461 Pick Add nodeId# 20462 Enable nodeId# 20434 Enable nodeId# 20421 Enable nodeId# 20442 Add nodeId# 20463 Enable nodeId# 20386 Enable nodeId# 20458 Add nodeId# 20464 Disable nodeId# 20346 Enable nodeId# 20346 Delete nodeId# 20391 Disable nodeId# 20426 Enable nodeId# 20397 Delete nodeId# 20451 Delete nodeId# 20346 Enable nodeId# 20426 Disable nodeId# 20389 Delete nodeId# 20381 Pick Delete nodeId# 20401 Enable nodeId# 20454 Enable nodeId# 20389 Delete nodeId# 20414 Delete nodeId# 20413 Delete nodeId# 20437 Delete nodeId# 20458 Pick Pick Delete nodeId# 20463 Enable nodeId# 20383 Add nodeId# 20465 Enable nodeId# 20343 Add nodeId# 20466 Delete nodeId# 20421 Add nodeId# 20467 Disable nodeId# 20420 Pick Disable nodeId# 20372 Disable nodeId# 20432 Delete nodeId# 20452 Disable nodeId# 20454 Enable nodeId# 20454 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] Test command err: 2026-01-10T12:24:54.765670Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/0014ff/r3tmp/tmpapEadq/pdisk_1.dat 2026-01-10T12:24:54.815644Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.815731Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.819027Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.819081Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:54.822407Z node 1 :BS_SYNCLOG WARN: 
blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.717520Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.718930Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.719008Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.719051Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.720279Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:24:55.721214Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/d9xi/0014ff/r3tmp/tmpUnkQYz/pdisk_1.dat driveSize# 7900 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# -0.0125 driveSize# 8000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0 driveSize# 8100 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0.0125 driveSize# 16000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 1 relativeError# 0 driveSize# 24000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 12 SlotSizeInUnits# 2 relativeError# 0 driveSize# 31000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 2 relativeError# -0.03125 driveSize# 50000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 13 SlotSizeInUnits# 4 relativeError# -0.03846153846 driveSize# 50000 unitSizeInBytes# 100 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 32 relativeError# -0.0234375 driveSize# 18000 unitSizeInBytes# 200 maxSlots# 16 -> ExpectedSlotCount# 11 SlotSizeInUnits# 8 relativeError# 0.02272727273 driveSize# 1 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 1 relativeError# 0 driveSize# 2 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 2 relativeError# 0 driveSize# 3 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# -0.25 driveSize# 4 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0 driveSize# 5 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 
relativeError# 0.25 driveSize# 6 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.25 driveSize# 7 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.125 driveSize# 8 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0 driveSize# 9 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.125 driveSize# 10 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.25 driveSize# 11 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.375 driveSize# 12 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.25 driveSize# 13 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.1875 driveSize# 14 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.125 driveSize# 15 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.0625 driveSize# 16 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0 driveSize# 17 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.0625 driveSize# 18 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.125 driveSize# 19 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.1875 driveSize# 20 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.25 driveSize# 21 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.3125 driveSize# 22 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.375 driveSize# 23 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.4375 driveSize# 24 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.25 driveSize# 25 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.21875 driveSize# 26 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.1875 driveSize# 27 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.15625 driveSize# 28 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.125 driveSize# 29 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.09375 driveSize# 30 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.0625 driveSize# 31 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.03125 driveSize# 32 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0 driveSize# 33 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.03125 driveSize# 34 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.0625 driveSize# 35 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.09375 driveSize# 36 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.125 driveSize# 37 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.15625 
driveSize# 38 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.1875 driveSize# 39 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.21875 driveSize# 40 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.25 driveSize# 41 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.28125 driveSize# 42 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.3125 driveSize# 43 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.34375 driveSize# 44 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.375 driveSize# 45 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.40625 driveSize# 46 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.4375 driveSize# 47 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.46875 driveSize# 48 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.25 driveSize# 49 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.234375 driveSize# 50 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.21875 driveSize# 51 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.203125 driveSize# 52 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.1875 driveSize# 53 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.171875 driveSize# 54 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.15625 driveSize# 55 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.140625 driveSize# 56 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.125 driveSize# 57 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.109375 driveSize# 58 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.09375 driveSize# 59 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.078125 driveSize# 60 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.0625 driveSize# 61 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.046875 driveSize# 62 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.03125 driveSize# 63 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.015625 driveSize# 64 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0 driveSize# 65 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.015625 driveSize# 66 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.03125 driveSize# 67 unitSizeInBytes# 1 ... 
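The driveSize/unitSizeInBytes/maxSlots -> ExpectedSlotCount/SlotSizeInUnits/relativeError tuples above follow a consistent pattern. As an aside (not taken from the test or the NKikimr sources), here is a minimal Python sketch that reproduces every tuple visible in the log under one assumed rule: grow the per-slot size in whole powers of two until the rounded slot count fits under maxSlots, then report how far the resulting capacity deviates from the raw drive size. The helper names infer_slot_layout and round_half_up are hypothetical and exist only for this illustration.

import math

def round_half_up(x: float) -> int:
    # Python's built-in round() uses banker's rounding; the values printed by the
    # test (e.g. 12.5 slots -> 13) need ordinary "round half away from zero".
    return math.floor(x + 0.5)

def infer_slot_layout(drive_size: int, unit_size: int, max_slots: int):
    # Assumed rule, inferred from the log output above -- not the YDB implementation.
    units = drive_size / unit_size
    slot_size_in_units = 1
    while round_half_up(units / slot_size_in_units) > max_slots:
        slot_size_in_units *= 2          # slot sizes are kept to powers of two
    slot_count = round_half_up(units / slot_size_in_units)
    # relativeError as printed: deviation of the raw drive size from the
    # capacity implied by slot_count * slot_size_in_units * unit_size.
    relative_error = drive_size / (slot_count * slot_size_in_units * unit_size) - 1
    return slot_count, slot_size_in_units, relative_error

# Spot-checks against tuples taken verbatim from the test output.
assert infer_slot_layout(24000, 1000, 16) == (12, 2, 0.0)
assert infer_slot_layout(50000, 1000, 16)[:2] == (13, 4)
assert infer_slot_layout(18000, 200, 16)[:2] == (11, 8)
assert infer_slot_layout(12, 1, 1)[:2] == (1, 16)
print(infer_slot_layout(7900, 1000, 16))   # -> approximately (8, 1, -0.0125)

If this reading is right, relativeError is simply driveSize / (ExpectedSlotCount * SlotSizeInUnits * unitSizeInBytes) - 1, which matches, for example, the -0.03846153846 printed for driveSize 50000 with 13 slots of 4 units (50000/52000 - 1).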
iskId# 1001 2026-01-10T12:24:58.028371Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1001 2026-01-10T12:24:58.028409Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1001 2026-01-10T12:24:58.028448Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1001 2026-01-10T12:24:58.028483Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1001 2026-01-10T12:24:58.028541Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1001 2026-01-10T12:24:58.028601Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1001 2026-01-10T12:24:58.028639Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1001 2026-01-10T12:24:58.028675Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1001 2026-01-10T12:24:58.028715Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1001 2026-01-10T12:24:58.028751Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1001 2026-01-10T12:24:58.028783Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1001 2026-01-10T12:24:58.028824Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1001 2026-01-10T12:24:58.028865Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1001 2026-01-10T12:24:58.028899Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1001 2026-01-10T12:24:58.028957Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1001 2026-01-10T12:24:58.029000Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1001 2026-01-10T12:24:58.029036Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1001 2026-01-10T12:24:58.029081Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1001 2026-01-10T12:24:58.029117Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1001 2026-01-10T12:24:58.029158Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1001 2026-01-10T12:24:58.029201Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1001 2026-01-10T12:24:58.029270Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1001 2026-01-10T12:24:58.029344Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} 
WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D66B6CFB080 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1001 2026-01-10T12:24:58.029421Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000005 PDiskId# 1001 2026-01-10T12:24:58.029488Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2562560001604 PDiskId# 1001 2026-01-10T12:24:58.029538Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1001 2026-01-10T12:24:58.029780Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-10T12:24:58.029889Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2384.377840 2026-01-10T12:24:58.030155Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1001 2026-01-10T12:24:58.030237Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-10T12:24:58.030269Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:58.030293Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.030339Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-10T12:24:58.030390Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000006 PDiskId# 1001 2026-01-10T12:24:58.030429Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2562560001860 PDiskId# 1001 2026-01-10T12:24:58.030490Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.030688Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001860 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2384.378798 2026-01-10T12:24:58.030781Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-10T12:24:58.030808Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001860 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:58.030842Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.030867Z node 3 
:BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-10T12:24:58.030900Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.041064Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.051294Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.061500Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.071748Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.082015Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.091647Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000007 PDiskId# 1001 2026-01-10T12:24:58.091754Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560002104 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 2384.440102 2026-01-10T12:24:58.091978Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-10T12:24:58.092066Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560002104 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-10T12:24:58.092131Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.092246Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 13 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } 2026-01-10T12:24:58.102464Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 
2026-01-10T12:24:58.112719Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.122968Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.133239Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.143478Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.153716Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.163932Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.174107Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-10T12:24:58.184343Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.039752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.131808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.131871Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.146840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.146950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.147142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.154869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.155125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.155773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.156012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.158192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.158353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.163288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.163487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.163605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.164428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.328521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:24:57.332436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.332635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.332744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.332825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.332887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.332975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.333610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2026-01-10T12:24:58.248111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2026-01-10T12:24:58.248155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.248185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:24:58.248297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-10T12:24:58.248422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:58.250667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.251042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:58.251084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:24:58.251290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:58.251330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-10T12:24:58.251910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.251950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-10T12:24:58.252032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:24:58.252098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:24:58.252140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-10T12:24:58.252163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:24:58.252195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-10T12:24:58.252228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-10T12:24:58.252259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-10T12:24:58.252292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-10T12:24:58.252391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:24:58.252428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-10T12:24:58.252452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-10T12:24:58.252941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:24:58.253020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-10T12:24:58.253047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-10T12:24:58.253079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-10T12:24:58.253114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:58.253195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-10T12:24:58.253243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:824:2791] 2026-01-10T12:24:58.255533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-10T12:24:58.255598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:24:58.255634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1036:2958] TestWaitNotification: OK eventTxId 105 2026-01-10T12:24:58.256119Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:24:58.256302Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_1/Topic1" took 238us result status StatusSuccess 2026-01-10T12:24:58.256940Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 2 ChildPartitionIds: 3 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 4 ChildPartitionIds: 5 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { ToBound: "?" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "?" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\277" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "?" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "?" 
ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\277" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> UpsertLoad::ShouldDropCreateTable [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.039909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.131871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.131931Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.146909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.146998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.147177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.153468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.153680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.154319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.154508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.156171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.156312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.163304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.163509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.163572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.164438Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.342478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.343947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.344621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at schemeshard: 72057594046678944 2026-01-10T12:24:58.401642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.401763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.401809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.401958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.402022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.402068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.407074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:58.410412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:58.410470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:58.411103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:58.411181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:58.411239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:58.412460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1146:3051] sender: [1:1206:2058] recipient: [1:15:2062] 2026-01-10T12:24:58.491139Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:24:58.491491Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 345us result status StatusSuccess 2026-01-10T12:24:58.492248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } 
PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 109 2026-01-10T12:24:58.496432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 109 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:58.496689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.496855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-10T12:24:58.499517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:58.499775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-10T12:24:58.500171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-10T12:24:58.500223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-10T12:24:58.500671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-10T12:24:58.500787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-10T12:24:58.500834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:1211:3103] TestWaitNotification: OK eventTxId 109 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 111 2026-01-10T12:24:58.504502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:58.504775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 111:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.504994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:58.507362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: 
Status: StatusInvalidParameter Reason: "Unable to change bounds of non-root partition: 1" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:58.507634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 111, wait until txId: 111 TestWaitNotification wait txId: 111 2026-01-10T12:24:58.507977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 111: send EvNotifyTxCompletion 2026-01-10T12:24:58.508022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 111 2026-01-10T12:24:58.508586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-01-10T12:24:58.508701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-10T12:24:58.508750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:1218:3110] TestWaitNotification: OK eventTxId 111 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.534805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.534894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.534936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.534971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.535014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.535063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.535152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.535244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.536181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.536493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.611364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.611415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.626606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.626693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.626901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.633508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.633745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.634426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.634611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.636750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.636917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.637938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.638004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.638078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.638117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.638151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.638331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.749054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.749926Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.750780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
opose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-10T12:24:58.586993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.587042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:24:58.587267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:24:58.587445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:24:58.587496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:58.590572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.590975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:58.591020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:24:58.591174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:24:58.591343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:58.591382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:24:58.591419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:24:58.591652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.591696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:24:58.591780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:24:58.591807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:58.591845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 
2026-01-10T12:24:58.591873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:58.591902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:24:58.591934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:58.591964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-10T12:24:58.591990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:24:58.592170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:24:58.592214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2026-01-10T12:24:58.592241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:24:58.592274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:24:58.593736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:58.593843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:58.593879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:24:58.593907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:24:58.593952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:24:58.594613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:58.594676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:58.594725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:24:58.594753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:24:58.594776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:58.594828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2026-01-10T12:24:58.594874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:824:2791] 2026-01-10T12:24:58.597671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:24:58.597762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:24:58.597810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:24:58.597839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:958:2894] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } TestModificationResults wait txId: 106 2026-01-10T12:24:58.601131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:58.601349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.601464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split and merge operations disabled, at schemeshard: 72057594046678944 2026-01-10T12:24:58.603350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split and merge operations disabled" TxId: 106 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:58.603585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split and merge operations disabled, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:24:58.603840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:24:58.603876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:24:58.604256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:24:58.604342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:24:58.604381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1016:2934] TestWaitNotification: OK eventTxId 106 |94.3%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.039563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.113697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.113745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.127881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.127980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.130009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.139727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.139893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.144012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.147833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.154616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.155999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.163327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.163509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.163554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.164395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.306624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307342Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.307981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.308048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-10T12:24:58.355830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-10T12:24:58.355860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-10T12:24:58.355885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:24:58.355915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:24:58.355942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:24:58.356162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.356287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.356439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:24:58.356501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:24:58.356536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:24:58.356558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:24:58.356580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:24:58.356773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.356875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.357134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-10T12:24:58.357364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:24:58.357798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.357922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.358403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.358491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read 
records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.358859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.359048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.359112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.359228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.359434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.359540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.360923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:58.368659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:58.373595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:58.373687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:58.374111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:58.374176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:58.374232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:58.374459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1090:2991] sender: [1:1148:2058] recipient: [1:15:2062] 2026-01-10T12:24:58.445034Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:24:58.445401Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 420us result status StatusSuccess 2026-01-10T12:24:58.446285Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active 
ParentPartitionIds: 0 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> TSequence::CreateSequence >> TSequence::CreateSequenceParallel >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.039542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.112930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.113000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.128055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.128142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.129972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.140214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.140464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.144017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.147924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.154569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.156018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.163325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.163571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.163653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.164438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.333936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.334948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.335943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.336010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:24:59.285577Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-10T12:24:59.285699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:24:59.285745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:59.287298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.287500Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:59.287523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:24:59.287628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:24:59.287755Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:59.287783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-10T12:24:59.287809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-10T12:24:59.288140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.288171Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-10T12:24:59.288251Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:24:59.288278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:59.288309Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-10T12:24:59.288330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:59.288357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-10T12:24:59.288388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-10T12:24:59.288416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 
2026-01-10T12:24:59.288438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-10T12:24:59.288531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:24:59.288559Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-10T12:24:59.288599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:24:59.288619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:24:59.289532Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:59.289587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:59.289613Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:24:59.289645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:24:59.289673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:24:59.290077Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:59.290119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-10T12:24:59.290137Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-10T12:24:59.290153Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:24:59.290170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:59.290209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-10T12:24:59.292410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-10T12:24:59.292459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-10T12:24:59.292597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-10T12:24:59.292627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-10T12:24:59.292861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-10T12:24:59.292919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-10T12:24:59.292945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:980:2909] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } TestModificationResults wait txId: 105 2026-01-10T12:24:59.295417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:24:59.295654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.295827Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:59.297327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 1" TxId: 105 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:24:59.297568Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-10T12:24:59.297897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-10T12:24:59.297940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-10T12:24:59.298277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-10T12:24:59.298376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-10T12:24:59.298420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:987:2916] TestWaitNotification: OK eventTxId 105 |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:52.236264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:52.357087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:52.365960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:52.366443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:52.366580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:52.696428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:52.696522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:52.701207Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047889926879 != 1768047889926882 2026-01-10T12:24:52.713092Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:52.755566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:52.839525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:53.112100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:53.125345Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:53.225931Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2026-01-10T12:24:53.450314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:53.450461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:53.450699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:53.450755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:53.471931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:53.711626Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2026-01-10T12:24:53.712829Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-10T12:24:53.724254Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor finished in 0.011208s, errors=0 2026-01-10T12:24:53.724453Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:53.724561Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-10T12:24:53.779882Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor finished in 0.055106s, errors=0 2026-01-10T12:24:53.779963Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:971:2822] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-10T12:24:56.778022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:56.789258Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:56.789666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:56.789743Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:57.038172Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:57.038281Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:57.070335Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1768047894225844 != 1768047894225848 2026-01-10T12:24:57.075227Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.121490Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:57.189729Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:57.464833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:57.478513Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:57.580231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:57.794276Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2026-01-10T12:24:57.794399Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 2026-01-10T12:24:58.205826Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.411127s, errors=0 2026-01-10T12:24:58.205952Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 2026-01-10T12:24:58.212361Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2026-01-10T12:24:58.235755Z node 2 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:994:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.235909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.236432Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1005:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.236510Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.425051Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2026-01-10T12:24:58.440985Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1046:2895], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.441079Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.441282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1049:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.441314Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.453870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.504254Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-10T12:24:58.669492Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2026-01-10T12:24:58.669743Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-10T12:24:58.681471Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor finished in 0.011466s, errors=0 2026-01-10T12:24:58.681713Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-10T12:24:58.681823Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-10T12:24:58.786403Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor finished in 0.104340s, errors=0 2026-01-10T12:24:58.786499Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:1138:2969] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.039034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.040038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.116009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.116057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.128028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.128120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.130009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.139400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.139683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.143979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.147846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.154827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.156021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.163344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.163518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.163567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-10T12:24:57.163620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.164415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.292991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-10T12:24:57.295826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.295968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.296024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.296095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.296179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.296266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.296348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-10T12:24:59.515949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-10T12:24:59.515972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-10T12:24:59.516131Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516223Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516339Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-10T12:24:59.516413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:24:59.516435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-10T12:24:59.516456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:24:59.516539Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516801Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-10T12:24:59.516977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:24:59.517338Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.517449Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.517826Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.517900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518287Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518332Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518597Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.518673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519083Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519320Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519409Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519441Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.519473Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.523636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:59.527678Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:59.527748Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:59.527888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:59.527926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:59.527961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:59.528053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1092:2986] sender: [2:1151:2058] recipient: [2:15:2062] 2026-01-10T12:24:59.584836Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2026-01-10T12:24:59.585071Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 261us result status StatusSuccess 2026-01-10T12:24:59.585751Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 
TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:24:57.038456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:24:57.038577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:24:57.038644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:24:57.038699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:24:57.038751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:24:57.038797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:24:57.038857Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:24:57.039638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:24:57.041392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:24:57.139435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:24:57.139510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:57.155582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:24:57.155686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:24:57.155911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:24:57.164504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:24:57.164783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:24:57.165515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:24:57.165756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:24:57.168364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.168545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:24:57.169872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:57.169945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:57.170031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:24:57.170080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:24:57.170137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:24:57.170391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:24:57.348823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 
281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.353498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.353740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.353855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.353954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:24:57.354918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ionPlan, step: 300, at tablet: 72057594046678944 2026-01-10T12:24:59.514788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 107:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-10T12:24:59.531187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-01-10T12:24:59.531413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-10T12:24:59.531520Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-10T12:24:59.531589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.531642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-10T12:24:59.531835Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 128 -> 240 2026-01-10T12:24:59.532096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:59.536386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.537062Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:24:59.537132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:24:59.537463Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:24:59.537515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 107, path id: 39 2026-01-10T12:24:59.538097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-10T12:24:59.538164Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-10T12:24:59.538311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:24:59.538360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:24:59.538406Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-10T12:24:59.538452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:24:59.538500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-10T12:24:59.538552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-10T12:24:59.538605Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-10T12:24:59.538645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-10T12:24:59.538847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:24:59.538900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 1 2026-01-10T12:24:59.538945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-10T12:24:59.540200Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:24:59.540318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-10T12:24:59.540363Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-10T12:24:59.540421Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:24:59.540470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:24:59.540568Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 1 2026-01-10T12:24:59.540622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:818:2784] 2026-01-10T12:24:59.543355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-10T12:24:59.543481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-10T12:24:59.543531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 
107: satisfy waiter [2:1090:3003] TestWaitNotification: OK eventTxId 107 2026-01-10T12:24:59.544254Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:24:59.544537Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 319us result status StatusSuccess 2026-01-10T12:24:59.545368Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] >> KqpKnn::BitVectorKnnPushdown >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 7843, MsgBus: 26406 2026-01-10T12:24:33.762164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707795528891720:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:33.762714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:33.967356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:33.974288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:33.974394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:34.032142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707795528891682:2081] 1768047873757744 != 1768047873757747 2026-01-10T12:24:34.041803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:34.047445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:34.136121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:34.136141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:34.136147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:34.136214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:34.149792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:34.561070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:34.768309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:34.974051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: 
StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:34.974109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:34.974191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:34.974213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:35.000741Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7593707795528891743:2100], interval end# 2026-01-10T12:24:35.000000Z, event interval end# 2026-01-10T12:24:35.000000Z 2026-01-10T12:24:35.000789Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7593707795528891743:2100], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-01-10T12:24:35.003789Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7593707795528891671:2070], interval end# 2026-01-10T12:24:35.000000Z, event interval end# 2026-01-10T12:24:35.000000Z 2026-01-10T12:24:35.003829Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7593707795528891671:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-01-10T12:24:35.975221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:35.975263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:35.975294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:35.975307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:36.904187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707808413794457:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.904337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.904718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707808413794467:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.904798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.975795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:36.975831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:36.975913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:36.975928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:37.136703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:7593707812708761781:2509], Recipient [1:7593707795528892029:2143]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:24:37.136743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:24:37.136755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-10T12:24:37.136778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:7593707812708761777:2506], Recipient [1:7593707795528892029:2143]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-10T12:24:37.136789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:24:37.270582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:24:37.271059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, at schemeshard: 72057594046644480 2026-01-10T12:24:37.271275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2026-01-10T12:24:37.271884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-10T12:24:37.271939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-10T12:24:37.271956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715658:0 type: TxCreateTable target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-10T12:24:37.271995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:24:37.272128Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:83: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] path /Root/Followers ShardIndices size 1 2026-01-10T12:24:37.272176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-10T12:24:37.272189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715658:0 1 -> 2 2026-01-10T12:24:37.272933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_create_table.cpp:766: TCreateTable Propose creating new table opId# 281474976715658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 38] schemeshard# 72057594046644480 tx# Work ... 
e_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-01-10T12:24:57.449248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2026-01-10T12:24:57.449262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-10T12:24:57.449316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 800 row count 4 2026-01-10T12:24:57.449348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 4, DataSize 800 2026-01-10T12:24:57.449358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2026-01-10T12:24:57.449420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-01-10T12:24:57.449474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-10T12:24:57.449497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 0 row count 0 2026-01-10T12:24:57.449511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:24:57.449516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2026-01-10T12:24:57.449583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:393: Do not want to split tablet 72075186224037888 by the CPU load from the follower ID 2, reason: SplitByLoadNotEnabledForTable 2026-01-10T12:24:57.449641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-10T12:24:57.449740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-10T12:24:57.449754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-10T12:24:57.449764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:24:57.449919Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 
0.001044 TabletId: 72075186224037888 NodeId: 1 StartTime: 1768047877286 AccessTime: 1768047877870 UpdateTime: 1768047877701 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:24:57.450056Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0.000106 TabletId: 72075186224037888 NodeId: 1 StartTime: 1768047877353 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-10T12:24:57.987768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:57.987801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:57.987834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:57.987843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:58.988190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:58.988246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:58.988285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:58.988304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats for /Root/Followers , attempt 2 2026-01-10T12:24:59.972235Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7593707907198042851:2492], owner: [1:7593707907198042849:2490], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-10T12:24:59.972887Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7593707907198042851:2492], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-10T12:24:59.973122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7593707907198042851:2492], Recipient [1:7593707795528892029:2143]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-10T12:24:59.973146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-10T12:24:59.973289Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7593707907198042851:2492], row count: 2, finished: 1 2026-01-10T12:24:59.973350Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7593707907198042851:2492], owner: [1:7593707907198042849:2490], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-10T12:24:59.974425Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7593707795528891743:2100], database# /Root, query hash# 3266603936201095014, cpu time# 251427 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/Followers' ... 
SELECT from partition_stats, attempt 0 2026-01-10T12:24:59.988647Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:59.988688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:24:59.988743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7593707795528892029:2143], Recipient [1:7593707795528892029:2143]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:24:59.988758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:25:00.000395Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7593707795528891671:2070], interval end# 2026-01-10T12:25:00.000000Z, event interval end# 2026-01-10T12:25:00.000000Z 2026-01-10T12:25:00.000408Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7593707795528891743:2100], interval end# 2026-01-10T12:25:00.000000Z, event interval end# 2026-01-10T12:25:00.000000Z 2026-01-10T12:25:00.000451Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7593707795528891671:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-01-10T12:25:00.000469Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7593707795528891743:2100], query logs count# 1, processor ids count# 1, processor id to database count# 0 2026-01-10T12:25:00.377423Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7593707911493010162:2500], owner: [1:7593707911493010160:2498], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-10T12:25:00.377900Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7593707911493010162:2500], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-10T12:25:00.378105Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7593707911493010162:2500], Recipient [1:7593707795528892029:2143]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-10T12:25:00.378131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-10T12:25:00.378256Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7593707911493010162:2500], row count: 2, finished: 1 2026-01-10T12:25:00.378322Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7593707911493010162:2500], owner: [1:7593707911493010160:2498], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-10T12:25:00.382211Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# 
[1:7593707795528891743:2100], database# /Root, query hash# 18339066598126957035, cpu time# 387744 |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpKnn::FloatVectorKnnPushdown >> KqpKnn::VectorSearchKnnPushdown+Nullable |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> KqpKnn::Uint8VectorKnnPushdown |94.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> test_auditlog.py::test_single_dml_query_logged[update] >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> DataShardTxOrder::ZigZag_oo8_dirty |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |94.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |94.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:49.487121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:49.584708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:49.603323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:49.603834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:49.603953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:49.971259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:49.971368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:49.977693Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047887303585 != 1768047887303588 2026-01-10T12:24:49.985997Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:50.031601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:50.122659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:50.418535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:50.432471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:50.544136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:50.858975Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2026-01-10T12:24:50.860800Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-10T12:24:50.876319Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.015153s, errors=0 2026-01-10T12:24:50.876625Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 
2026-01-10T12:24:50.876735Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2026-01-10T12:24:50.877940Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:367: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-10T12:24:50.879506Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:401: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started fullscan actor# [1:964:2822] 2026-01-10T12:24:50.879662Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 100 2026-01-10T12:24:50.879700Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-10T12:24:50.879997Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-10T12:24:50.883009Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.002925s, sampled# 100, iter finished# 1, oks# 100 2026-01-10T12:24:50.883153Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:417: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 100 2026-01-10T12:24:50.883336Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:446: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started# 10 actors each with inflight# 1 2026-01-10T12:24:50.883389Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called 2026-01-10T12:24:50.883418Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883463Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called 2026-01-10T12:24:50.883488Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883521Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called 2026-01-10T12:24:50.883552Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883571Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called 2026-01-10T12:24:50.883583Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883599Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} Bootstrap called 2026-01-10T12:24:50.883611Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 
2026-01-10T12:24:50.883639Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} Bootstrap called 2026-01-10T12:24:50.883656Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883673Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} Bootstrap called 2026-01-10T12:24:50.883693Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883708Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} Bootstrap called 2026-01-10T12:24:50.883720Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883747Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} Bootstrap called 2026-01-10T12:24:50.883770Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.883793Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} Bootstrap called 2026-01-10T12:24:50.883811Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-10T12:24:50.885393Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} session: ydb://session/3?node_id=1&id=ZDQyYTRiZmMtNDRmZTJmMGUtNDkzMTgzZi1mOWUwNWIwYQ== 2026-01-10T12:24:50.886784Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} session: ydb://session/3?node_id=1&id=Mzc0MWNjZmEtYzMwZjE5ZWMtODQwMmJkYTYtZjM5NTI5MjQ= 2026-01-10T12:24:50.887939Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} session: ydb://session/3?node_id=1&id=ZmU3MWE4OTItZTM0ZjYxYzYtY2IxNDIwNzQtNmFlYjg5ZjA= 2026-01-10T12:24:50.889028Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} session: ydb://session/3?node_id=1&id=YTQyNjUxYWMtOGY3ZGEyODYtZjZjNjlhZDQtOWJlOGNiMDc= 2026-01-10T12:24:50.891013Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} session: ydb://session/3?node_id=1&id=NGYxNzY0NTgtMTAzM2JlMWItNTY2YzRhYTYtMmM5OWY0YzE= 2026-01-10T12:24:50.891101Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} session: ydb://session/3?node_id=1&id=MjE4ZDMyYzQtOGM4N2Y4OC0yNWMxNTA1MS1iNzBjODA0OQ== 2026-01-10T12:24:50.892255Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} session: ydb://session/3?node_id=1&id=YjA5NDk4MDItNGE2MjQzMTMtM2NjODVmMDMtMTY3YzUxMjA= 2026-01-10T12:24:50.893315Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} session: 
ydb://session/3?node_id=1&id=NjVkNWI3YzEtYWJlMDA2NjMtN2VkMjEzOGUtNjBiYWYzYzE= 2026-01-10T12:24:50.895348Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} session: ydb://session/3?node_id=1&id=OWE3ODE2YmYtZjRkYTNkZGQtNDEwZjkwZjUtZTU5NmE3OWM= 2026-01-10T12:24:50.895421Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} session: ydb://session/3?node_id=1&id=ODIwOWE1ZGMtNjA1ODcyMmQtNGM1NGJmZWYtYmE0YTMyZTk= 2026-01-10T12:24:50.904068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:988:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you ... _manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:25:00.311237Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1075:2918] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:25:00.311858Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1078:2919] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:25:00.312670Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1083:2921] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-10T12:25:00.420281Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1042:2894], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420399Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1043:2895], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420460Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1044:2896], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420541Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1045:2897], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420610Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1046:2898], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420666Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1047:2899], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420722Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1048:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420778Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1057:2909], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420886Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1059:2911], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.420974Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1071:2917], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:00.456766Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1184:3008] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:00.905387Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 6} finished in 0.640106s, errors=0 2026-01-10T12:25:00.905622Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 6 { Tag: 6 DurationMs: 640 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:00.919016Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2113:3430] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:01.297840Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 9} finished in 1.027431s, errors=0 2026-01-10T12:25:01.298040Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 9 { Tag: 9 DurationMs: 1027 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:01.312497Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3020:3836] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:01.753562Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 1.493937s, errors=0 2026-01-10T12:25:01.753697Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 3 { Tag: 3 DurationMs: 1493 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:01.768979Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3927:4242] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:02.378226Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 5} finished in 2.114837s, errors=0 2026-01-10T12:25:02.378710Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 5 { Tag: 5 DurationMs: 2114 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:02.395620Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4834:4648] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:03.088288Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: 
TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 2.830932s, errors=0 2026-01-10T12:25:03.088776Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 2 { Tag: 2 DurationMs: 2830 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:03.106068Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5741:5054] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:03.841584Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 8} finished in 3.571301s, errors=0 2026-01-10T12:25:03.841827Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 8 { Tag: 8 DurationMs: 3571 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:03.860321Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:6648:5460] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:04.671487Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 11} finished in 4.397539s, errors=0 2026-01-10T12:25:04.671790Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 11 { Tag: 11 DurationMs: 4397 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:04.690499Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7555:5866] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:05.601200Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 10} finished in 5.329016s, errors=0 2026-01-10T12:25:05.601789Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 10 { Tag: 10 DurationMs: 5329 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:05.621712Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:8462:6272] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:06.602050Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 6.338815s, errors=0 2026-01-10T12:25:06.602535Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 4 { Tag: 4 DurationMs: 6338 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:06.622863Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:9369:6678] txid# 281474976716577, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:07.410854Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 7} finished in 7.143852s, errors=0 2026-01-10T12:25:07.411203Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 7 { Tag: 7 DurationMs: 7143 OperationsOK: 100 OperationsError: 0 } 2026-01-10T12:25:07.411261Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:481: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 7.157278s, oks# 1000, errors# 0 2026-01-10T12:25:07.411533Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.5%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchDelete::Large_3 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-10T12:25:08.827140Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.827183Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.827208Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.827599Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:25:08.828995Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.840751Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.841788Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:25:08.844456Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.844486Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.844510Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.844824Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-10T12:25:08.845230Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.845441Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.845610Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:25:08.847572Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-10T12:25:08.849666Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.849704Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.849753Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.850049Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:25:08.851333Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.851453Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.851674Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:25:08.858546Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.859786Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-10T12:25:08.859951Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:25:08.860016Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-10T12:25:08.861269Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.861296Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.861316Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.861580Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:25:08.862006Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.862185Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.862349Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-10T12:25:08.863223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:25:08.863527Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:25:08.863870Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:25:08.864133Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:25:08.864244Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:25:08.864274Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:25:08.864304Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:25:08.864513Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-10T12:25:08.864584Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:25:08.864599Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:25:08.864612Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:25:08.864749Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-10T12:25:08.864814Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-10T12:25:08.864834Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-10T12:25:08.864857Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-10T12:25:08.864921Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). 
Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-10T12:25:08.864964Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-10T12:25:08.864991Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-10T12:25:08.865016Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:25:08.865113Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-10T12:25:08.866231Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.866256Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.866290Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.866576Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:25:08.866946Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.867081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.867244Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-10T12:25:08.868341Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:25:08.868581Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-10T12:25:08.868874Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-10T12:25:08.869075Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-10T12:25:08.869196Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:25:08.869272Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:25:08.869302Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-10T12:25:08.869343Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-10T12:25:08.869385Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:25:08.869652Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2026-01-10T12:25:08.869760Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-10T12:25:08.869787Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-10T12:25:08.869827Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-10T12:25:08.869853Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-10T12:25:08.869882Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-10T12:25:08.870070Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). 
Partition stream id: 1 GOT RANGE 5 9 2026-01-10T12:25:08.871543Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.871576Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.871600Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-10T12:25:08.871969Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-10T12:25:08.872472Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-10T12:25:08.872651Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:25:08.872893Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-10T12:25:08.874044Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:25:08.874934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-10T12:25:08.875249Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-10T12:25:08.875410Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-10T12:25:08.876162Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-10T12:25:08.876194Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-10T12:25:08.876210Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-10T12:25:08.876228Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-10T12:25:08.876267Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-01-10T12:25:08.876295Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-10T12:25:08.876456Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). 
Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2026-01-10T12:25:08.877612Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 |94.5%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchUpdate::SimplePartitions >> KqpBatchDelete::DeleteOn >> KqpBatchUpdate::ManyPartitions_1 >> KqpBatchUpdate::Large_3 >> KqpBatchDelete::MultiStatement |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchUpdate::ManyPartitions_2 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpBatchUpdate::TableWithIndex >> KqpBatchUpdate::HasTxControl >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:07.803863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:07.909615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:07.917521Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:07.917638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:07.917681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:08.447889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:08.565604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:08.565755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:08.603254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:08.739997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:09.466522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:09.467119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:09.467167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:09.467200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:09.467749Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:09.537103Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:10.119583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:13.324190Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:13.332160Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-10T12:24:13.336162Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:13.375728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:13.375862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:13.415917Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:13.418283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:13.625769Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:13.625903Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:13.627666Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.629202Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.630148Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.630648Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.630765Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.631059Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.631143Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.631284Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.631451Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:13.647239Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:13.892280Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:13.927352Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:13.927472Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:13.969878Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:13.970474Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:13.970713Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:13.970790Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:13.970854Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:13.970908Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:13.970964Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:13.971024Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:13.975483Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:13.998188Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:13.998322Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2215:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:14.028359Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2271:2631] 2026-01-10T12:24:14.029001Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2271:2631], schemeshard id = 72075186224037897 2026-01-10T12:24:14.096820Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2334:2661] 2026-01-10T12:24:14.100022Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2026-01-10T12:24:14.111434Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Describe result: PathErrorUnknown 2026-01-10T12:24:14.111515Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Creating table 2026-01-10T12:24:14.111603Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Created ESchemeOpCreateTable transaction for path: /Root/Shared1/.metadata/_statistics 2026-01-10T12:24:14.126051Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2400:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:14.130744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:14.138594Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:14.138714Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:14.152825Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:14.306446Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared1/.metadata/script_executions 2026-01-10T12:24:14.367312Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:14.791617Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:14.902269Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:14.902359Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2341:2667] Owner: [3:2340:2666]. Column diff is empty, finishing 2026-01-10T12:24:15.498225Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 20 ... node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:7291:3785], StatRequests.size() = 1 2026-01-10T12:24:54.844550Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:7338:3797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:54.845068Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:24:54.845123Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:7338:3797], StatRequests.size() = 1 2026-01-10T12:24:56.150647Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:56.395310Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:56.395397Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:56.395447Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:56.395500Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:24:56.499411Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:7387:3811]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:56.499837Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:24:56.499874Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:7387:3811], StatRequests.size() = 1 2026-01-10T12:24:57.984895Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:7441:3826]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:57.985488Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:24:57.985547Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id 
= 24, ReplyToActorId = [3:7441:3826], StatRequests.size() = 1 2026-01-10T12:24:59.384747Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:59.384952Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2026-01-10T12:24:59.385056Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:24:59.385457Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:59.419887Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:59.419968Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:59.420336Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:59.434801Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:59.618988Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:7485:3839]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:59.619520Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:24:59.619570Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:7485:3839], StatRequests.size() = 1 2026-01-10T12:25:01.153920Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:7535:3854]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:01.154490Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:25:01.154554Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:7535:3854], StatRequests.size() = 1 2026-01-10T12:25:02.122849Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-10T12:25:02.122910Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-10T12:25:02.123195Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:25:02.139016Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-10T12:25:02.940852Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:7580:3865]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:02.941441Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:25:02.941500Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:7580:3865], StatRequests.size() = 1 2026-01-10T12:25:04.176615Z node 3 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:25:04.176680Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.590000s, at schemeshard: 72075186224037899 2026-01-10T12:25:04.176866Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:25:04.191885Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:04.236769Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:04.428342Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:7625:3878]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:04.428832Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:25:04.428884Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:7625:3878], StatRequests.size() = 1 2026-01-10T12:25:05.904634Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:7676:3895]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:05.905153Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:25:05.905208Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:7676:3895], StatRequests.size() = 1 2026-01-10T12:25:06.526947Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038900 2026-01-10T12:25:06.527008Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.656000s, at schemeshard: 72075186224038900 2026-01-10T12:25:06.527241Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038900, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:25:06.542167Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-10T12:25:07.354010Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:25:07.354173Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2026-01-10T12:25:07.354327Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:25:07.354660Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:07.389169Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:07.389247Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:07.389596Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 
1, are all stats full: 1 2026-01-10T12:25:07.403384Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:07.518134Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:7725:3908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:07.518443Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:25:07.518475Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:7725:3908], StatRequests.size() = 1 2026-01-10T12:25:07.518994Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7727:3826]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:07.523246Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:25:07.523895Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-10T12:25:07.523955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:7742:3831] 2026-01-10T12:25:07.524011Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:7742:3831] 2026-01-10T12:25:07.525007Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:7745:3832] 2026-01-10T12:25:07.525441Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:7745:3832], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:25:07.525507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:25:07.525720Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:7742:3831], server id = [2:7745:3832], tablet id = 72075186224038895, status = OK 2026-01-10T12:25:07.525956Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:25:07.526047Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:7727:3826], StatRequests.size() = 1 2026-01-10T12:25:07.526310Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> KqpBatchDelete::HasTxControl |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 >> TSequence::AlterTableSetDefaultFromSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:00.195338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:00.195458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:00.195505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:00.195552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:00.195608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:00.195666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:00.195778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:00.195864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:00.196759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:00.198300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:00.307336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:00.307402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:00.325380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:00.325531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:00.325711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:00.334703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:00.334968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:00.337715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:00.344210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:00.351309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:00.354056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:00.363819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:00.363921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:00.364038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:00.364124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:00.364184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:00.365476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:00.556156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.557941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.558018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.558083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.558150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.558277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.558355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... y parts: 3/4 2026-01-10T12:25:10.507189Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 3/4 2026-01-10T12:25:10.507229Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2026-01-10T12:25:10.507275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2026-01-10T12:25:10.509199Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.509249Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-10T12:25:10.509330Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:748:2725] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-10T12:25:10.509741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.509784Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.509930Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:25:10.509968Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:25:10.510018Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:25:10.510082Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:25:10.510351Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:25:10.510470Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:25:10.510505Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-10T12:25:10.510536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-10T12:25:10.510591Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-10T12:25:10.510620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-10T12:25:10.510661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2026-01-10T12:25:10.510734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:819:2776] message: TxId: 102 2026-01-10T12:25:10.510796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-10T12:25:10.510852Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:25:10.510900Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:25:10.511033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:25:10.511085Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-10T12:25:10.511114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-10T12:25:10.511150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:25:10.511179Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-10T12:25:10.511206Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-10T12:25:10.511253Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-10T12:25:10.511289Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:3 2026-01-10T12:25:10.511316Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:3 2026-01-10T12:25:10.511372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-10T12:25:10.512029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.512070Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.512269Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:25:10.512314Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-10T12:25:10.512388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:25:10.512448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-10T12:25:10.512543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:25:10.512882Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.512921Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.512972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.513018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.513073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.513102Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.515670Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:25:10.515716Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.518051Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.518197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:10.518284Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:819:2776] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-01-10T12:25:10.518441Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:25:10.518490Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:928:2877] 2026-01-10T12:25:10.518749Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 
269877764, Sender [7:930:2879], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:25:10.518795Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:25:10.518825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-10T12:25:10.518904Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-10T12:25:10.522101Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1011:2959], Recipient [7:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-10T12:25:10.522175Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-10T12:25:10.522301Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:25:10.522559Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 249us result status StatusPathDoesNotExist 2026-01-10T12:25:10.522762Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000039, drop txId: 102" Path: "/MyRoot/Table" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } >> KqpBatchDelete::Returning |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> KqpBatchDelete::SimplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:00.195339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:00.195482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:00.195542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:00.195591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:00.195638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:00.195669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:00.195779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:00.195853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:00.196834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:00.198248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:00.295485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:00.295575Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:00.313871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:00.314024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:00.317600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:00.327944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-10T12:25:00.329397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:00.337716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:00.344153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:00.351451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:00.354054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:00.363629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:00.363724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:00.363812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:00.363890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:00.364016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:00.365437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:00.550735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:25:00.553703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:00.553892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Latency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1405 } } CommitVersion { Step: 5000050 TxId: 114 } debug: NTableState::TProposedWaitParts operationId# 114:0 2026-01-10T12:25:11.503207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:25:11.503638Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1458:3394], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:11.503699Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:11.503735Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:25:11.503952Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269551620, Sender [7:1395:3339], Recipient [7:127:2152]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1395 RawX2: 30064774411 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-10T12:25:11.503996Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5280: StateWork, processing event TEvDataShard::TEvSchemaChanged 2026-01-10T12:25:11.504110Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1395 RawX2: 30064774411 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-10T12:25:11.504157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2026-01-10T12:25:11.504324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1395 RawX2: 30064774411 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-10T12:25:11.504399Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:25:11.504511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1395 RawX2: 30064774411 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-10T12:25:11.504622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:11.504672Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-10T12:25:11.504748Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-10T12:25:11.504807Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 114:0 129 -> 240 2026-01-10T12:25:11.504984Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:25:11.507784Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.508161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-10T12:25:11.508224Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.509965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-10T12:25:11.510014Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.510144Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-10T12:25:11.510175Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.510330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-10T12:25:11.510402Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.510461Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2026-01-10T12:25:11.510609Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1395:3339] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2026-01-10T12:25:11.510958Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-10T12:25:11.510999Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-10T12:25:11.511061Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-10T12:25:11.511113Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2026-01-10T12:25:11.511258Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:25:11.511312Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-01-10T12:25:11.511358Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-10T12:25:11.511401Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 
2026-01-10T12:25:11.511443Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-10T12:25:11.511490Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2026-01-10T12:25:11.511568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:796:2763] message: TxId: 114 2026-01-10T12:25:11.511634Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-10T12:25:11.511684Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 114:0 2026-01-10T12:25:11.511741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 114:0 2026-01-10T12:25:11.511906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-10T12:25:11.514012Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-10T12:25:11.514120Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:796:2763] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2026-01-10T12:25:11.514302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-10T12:25:11.514352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1426:3362] 2026-01-10T12:25:11.514556Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1428:3364], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:25:11.514629Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-10T12:25:11.514661Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2026-01-10T12:25:11.515998Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [7:1467:3403], Recipient [7:127:2152]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2026-01-10T12:25:11.516069Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-10T12:25:11.518990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:25:11.519268Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2026-01-10T12:25:11.519762Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2026-01-10T12:25:11.520005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-10T12:25:11.522651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:25:11.522963Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2026-01-10T12:25:11.523036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink |94.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::ServerlessTimeIntervals [GOOD] >> KqpBatchUpdate::UpdateOn >> KqpBatchDelete::ManyPartitions_3 >> KqpBatchDelete::TableNotExists >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> KqpKnn::Int8VectorKnnPushdown [GOOD] >> KqpKnn::BitVectorKnnPushdown [GOOD] >> KqpBatchUpdate::UnknownColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2026-01-10T12:25:07.921873Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:25:07.922943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:07.922999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:07.933948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:25:07.934275Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:25:07.934566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:25:07.965890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:25:07.974176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:25:07.974797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 
2026-01-10T12:25:07.976404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:25:07.976480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:25:07.976532Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:25:07.976897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:25:07.976969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:25:07.977050Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:25:08.047706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:25:08.072045Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:25:08.072265Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:25:08.072357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:25:08.072403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:25:08.072434Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:25:08.072464Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:25:08.072686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:08.072726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:08.072984Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:25:08.073065Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:25:08.073108Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:25:08.073147Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:25:08.073176Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:25:08.073208Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:25:08.073254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:25:08.073283Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:25:08.073313Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:25:08.073374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:08.073402Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:08.073446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:25:08.078836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:25:08.078901Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:25:08.078994Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:25:08.079183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:25:08.079233Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:25:08.079285Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:25:08.079337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:25:08.079370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:25:08.079399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:25:08.079426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:25:08.079715Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:25:08.079741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:25:08.079793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:25:08.079837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:25:08.079890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:25:08.079916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:25:08.079941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:25:08.079967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:25:08.079985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:25:08.092518Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:25:08.092603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:25:08.092659Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:25:08.092739Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:25:08.092817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:25:08.093474Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:08.093539Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:25:08.093601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:25:08.093697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:25:08.093738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:25:08.093918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:25:08.093962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:25:08.094005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:25:08.094045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:25:08.097939Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:25:08.098025Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:25:08.098307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:08.098347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:08.098411Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:25:08.098458Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:25:08.098498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:25:08.098546Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:25:08.098584Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... ions 2026-01-10T12:25:14.289565Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:25:14.289819Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:349:2316], Recipient [2:349:2316]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:14.289871Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:14.289924Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-10T12:25:14.289960Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:25:14.289988Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:25:14.290022Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-01-10T12:25:14.290050Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-01-10T12:25:14.290079Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.290106Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-01-10T12:25:14.290134Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-01-10T12:25:14.290170Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-01-10T12:25:14.290968Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-01-10T12:25:14.291014Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291040Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-01-10T12:25:14.291066Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-01-10T12:25:14.291098Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-01-10T12:25:14.291137Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291160Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-01-10T12:25:14.291182Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-01-10T12:25:14.291206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-01-10T12:25:14.291257Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-01-10T12:25:14.291286Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation 
[1000016:45] is the new logically incomplete end at 9437185 2026-01-10T12:25:14.291346Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-01-10T12:25:14.291390Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291435Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-01-10T12:25:14.291461Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-01-10T12:25:14.291492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-01-10T12:25:14.291548Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291572Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-01-10T12:25:14.291594Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-01-10T12:25:14.291616Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-01-10T12:25:14.291641Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291661Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-01-10T12:25:14.291712Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-01-10T12:25:14.291741Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-01-10T12:25:14.291776Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291801Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-01-10T12:25:14.291823Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-01-10T12:25:14.291846Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-01-10T12:25:14.291871Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.291894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-01-10T12:25:14.291918Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-01-10T12:25:14.291941Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-01-10T12:25:14.291980Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.292039Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on 
unit BlockFailPoint 2026-01-10T12:25:14.292067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-01-10T12:25:14.292169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-01-10T12:25:14.292586Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-01-10T12:25:14.292643Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:25:14.292699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.292727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-01-10T12:25:14.292753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-01-10T12:25:14.292778Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-01-10T12:25:14.293000Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-01-10T12:25:14.293042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-01-10T12:25:14.293072Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-01-10T12:25:14.293111Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-01-10T12:25:14.293167Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-10T12:25:14.293197Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-01-10T12:25:14.293224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-01-10T12:25:14.293257Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:25:14.293281Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:25:14.293311Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-10T12:25:14.293339Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-10T12:25:14.309244Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-10T12:25:14.309330Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-10T12:25:14.309402Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 
2026-01-10T12:25:14.309457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-10T12:25:14.309539Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:25:14.309605Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-10T12:25:14.310772Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-10T12:25:14.310825Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-10T12:25:14.310868Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:25:14.310900Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-10T12:25:14.310970Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-10T12:25:14.311013Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:23:58.849058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:23:58.965082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:23:58.973492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:23:58.973832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:23:58.974012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:23:59.499229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:59.642223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:59.642370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:59.704226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:59.830285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:00.894136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:00.894843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:00.894897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:00.895079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:00.895818Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:00.969236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:01.581888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:05.155645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:05.165001Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:05.169061Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:05.210301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:05.210429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.252598Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:05.254685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.452670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:05.452769Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.454246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.455033Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.455825Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.461125Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.461644Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.461812Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.461981Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.462215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.462410Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.480481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.764608Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:05.801638Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:05.801743Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:05.860349Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:05.862190Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:05.862491Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:05.862553Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:05.862611Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:05.862666Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:05.862733Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:05.862811Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:05.863462Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:05.880032Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.880195Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.910643Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:24:05.910945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:24:05.993246Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:24:05.996049Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:24:06.025770Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:24:06.025846Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:24:06.025936Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:24:06.031581Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:06.039985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:06.048204Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:06.048359Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:06.071942Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:06.301604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:24:06.332186Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:06.753494Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:06.854157Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:06.854243Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:24:07.533755Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... 5Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:24:36.852805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5619:4383], StatRequests.size() = 1 2026-01-10T12:24:37.325434Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-10T12:24:37.325494Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 27.920000s, at schemeshard: 72075186224037905 2026-01-10T12:24:37.325718Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:24:37.341642Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:37.525947Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:38.071158Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5662:4405]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:38.071579Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:24:38.071627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5662:4405], StatRequests.size() = 1 2026-01-10T12:24:39.081001Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5691:4417]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:39.081381Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:24:39.081436Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5691:4417], StatRequests.size() = 1 2026-01-10T12:24:39.550084Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:24:39.550611Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: 
EvPropagateStatistics, node id: 1 cookie: 7 2026-01-10T12:24:39.551072Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:39.551167Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:39.562538Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:39.562621Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:39.562938Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:39.577332Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:40.014808Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5724:4432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:40.015203Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:24:40.015255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5724:4432], StatRequests.size() = 1 2026-01-10T12:24:40.015837Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5726:4434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:40.020455Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-10T12:24:40.020527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5726:4434], StatRequests.size() = 1 2026-01-10T12:24:44.368017Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:44.368133Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:44.368441Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:44.382610Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:49.130858Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:49.130937Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:49.131215Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:49.146398Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:53.735670Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete 
stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:53.735737Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:53.735959Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:53.749762Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:55.819834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:57.230113Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:57.230201Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:57.230470Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:24:57.244493Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:58.259181Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-10T12:24:58.259255Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 23.044000s, at schemeshard: 72075186224037899 2026-01-10T12:24:58.259551Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:24:58.274338Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:59.660234Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:24:59.660309Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:59.660354Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:24:59.660386Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:25:02.217537Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:02.217611Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:02.217824Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:25:02.231511Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:03.591921Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: 
SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-10T12:25:03.592006Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 23.771000s, at schemeshard: 72075186224037905 2026-01-10T12:25:03.592252Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-10T12:25:03.606770Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:08.591925Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-10T12:25:08.592397Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2026-01-10T12:25:08.592778Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:08.592869Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-10T12:25:08.603793Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:08.603872Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:08.604169Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:25:08.617131Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:12.448002Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:12.448108Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:12.448402Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-10T12:25:12.473736Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22750, MsgBus: 15720 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:04.123460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:04.209203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:04.216741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:04.217231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:04.217373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:04.507966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:04.508099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:04.512836Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047901428803 != 1768047901428806 2026-01-10T12:25:04.524972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:04.567779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:04.694458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:05.012244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:05.012318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:05.012353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:05.012820Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:05.025092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:05.308723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:05.400256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:05.713881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.063340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.348280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.337361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.337790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.338577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1977:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.338736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.439794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.640707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.883793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.157627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.392224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.645429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.922663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.217296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.586298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.586432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.586984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.587089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.587259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.595643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:09.763091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:09.826727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2859:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:11.839715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpKnn::FloatVectorKnnPushdown [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 14569, MsgBus: 65127 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:03.210890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:03.394401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:03.410761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:03.411327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:03.411454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:03.889143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:03.889287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:03.893992Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047900638243 != 1768047900638246 2026-01-10T12:25:03.917551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:03.963308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:04.091792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:04.937048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:04.937228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:04.937277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:04.937879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:04.950147Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:05.300973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:05.411944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:05.708214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.115730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.392261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.337365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.337712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.338588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.338699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.439809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.655270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.909888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.183912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.434106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.708519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.984218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.281035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.641792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2786:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.641962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.642525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.642673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.642804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.650100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:09.822692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2795:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:09.892971Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2855:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:11.860224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpKnn::Uint8VectorKnnPushdown [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpBatchDelete::DeleteOn [GOOD] >> KqpBatchDelete::MultiStatement [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> KqpBatchUpdate::HasTxControl [GOOD] >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 28365, MsgBus: 64514 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:06.802795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:06.880438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:06.886561Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:06.886884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:06.886958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:07.178141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:07.178247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:07.182687Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047904713871 != 1768047904713874 2026-01-10T12:25:07.195925Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:07.241551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:07.372681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:07.697512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:07.697608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:07.697648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:07.698335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:07.711092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:07.990393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:08.085283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.309598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.622288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.864575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.571055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.571368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.572042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.572112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.598228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.807560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.048676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.346855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.589197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.873033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.143768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.479572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.858134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.858317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.858778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.858995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.859244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.864829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:12.026066Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:12.075062Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:13.568423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3151, MsgBus: 27613 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:06.085370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:06.171053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:06.178651Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:06.179059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:06.179168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:06.482727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:06.482849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:06.487935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047903653156 != 1768047903653159 2026-01-10T12:25:06.498294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:06.542059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:06.671560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:07.002092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:07.002189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:07.002245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:07.002894Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:07.015160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:07.272716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:07.388591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.654131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.999713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.250846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.970484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:08.971063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:08.972001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:08.972162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.006088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.221706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.453829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.729047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.968284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.270631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.564018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.866947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.238487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.238648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.239110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.239197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.239298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.246707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:11.406668Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:11.473948Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:13.138173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn [GOOD] Test command err: Trying to start YDB, gRPC: 28174, MsgBus: 9733 2026-01-10T12:25:09.840506Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707948807472723:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.840562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.128242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.167949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.168045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.212226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707948807472689:2081] 1768047909833916 != 1768047909833919 2026-01-10T12:25:10.246010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.250012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.421619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.429568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.429656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.429664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.429782Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.852192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.069801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.132404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.279180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.442437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.508965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.376267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965987343748:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.376413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.377026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965987343758:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.377119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.739682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.774271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.805633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.840031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.880982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.940488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.983043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.087208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970282311933:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.087304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.087365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970282311938:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.087910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970282311941:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.087980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.097208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.111089Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707970282311940:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.198428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707970282311993:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.840775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707948807472723:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.840840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.017798Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707974577279595:2539], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with ON 2026-01-10T12:25:16.018183Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWE5ODBiNy05NDM0NjNlZi01YjFlNTdjNi1hNGIwOGNiNw==, ActorId: [1:7593707974577279586:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwcyx468gb6jznamjgf66, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 1584, MsgBus: 28155 2026-01-10T12:25:09.841551Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707950223958147:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.841607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.143025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.153802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.153887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.250644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.255018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.389176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.430156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.430191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.430197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.430260Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.851619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.055558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.060715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:11.127929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.285225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.457465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.536399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.164224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967403829167:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.164353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.164671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967403829177:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.164742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.734108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.767624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.796360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.826917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.872611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.917127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.975349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.097967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971698797353:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.098063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.098557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971698797358:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.098629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971698797359:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.098677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.103393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.120561Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707971698797362:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.221211Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707971698797415:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.841821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707950223958147:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.841887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.132052Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707975993765015:2539], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:25:16.132433Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyOTIwYjEtODY1NjVjOTAtZjk2OWRmZTQtZGIxMzVkZTU=, ActorId: [1:7593707975993765006:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwcyw8q4g1ratszxnrrkp, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-10T12:25:16.160402Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707980288732317:2542], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:25:16.162293Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyOTIwYjEtODY1NjVjOTAtZjk2OWRmZTQtZGIxMzVkZTU=, ActorId: [1:7593707975993765006:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwd3gcbf50158h32wh6pa, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-10T12:25:16.182434Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707980288732321:2544], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:25:16.182770Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyOTIwYjEtODY1NjVjOTAtZjk2OWRmZTQtZGIxMzVkZTU=, ActorId: [1:7593707975993765006:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwd4c2bnrf4cteavh7mq3, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-10T12:25:16.207188Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707980288732325:2546], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:25:16.207764Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyOTIwYjEtODY1NjVjOTAtZjk2OWRmZTQtZGIxMzVkZTU=, ActorId: [1:7593707975993765006:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwd4xab01mm5a29rwgpsm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-10T12:25:16.221845Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707980288732331:2548], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:25:16.222199Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyOTIwYjEtODY1NjVjOTAtZjk2OWRmZTQtZGIxMzVkZTU=, ActorId: [1:7593707975993765006:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxwd5n11mmx7dps3npc9a2, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 3 column: 29 } severity: 1 } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 26767, MsgBus: 29994 2026-01-10T12:25:09.841064Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707949913664149:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.841138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.118920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.157617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.157732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.203010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.205437Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707949913664112:2081] 1768047909833967 != 1768047909833970 2026-01-10T12:25:10.217201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.284099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.431370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.431405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.431412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.431563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.856255Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.084172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.090504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:11.152749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.314860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.466869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.537989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.268463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967093535171:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.268583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.268951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967093535181:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.269005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.736352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.766177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.801231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.838948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.891281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.938338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.984683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.088381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971388503347:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.088503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.092381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971388503353:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.092450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971388503352:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.092496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.097342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.115367Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707971388503356:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.203896Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707971388503407:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.841543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707949913664149:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.841605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.253380Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707979978438273:2529], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:25:16.253876Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjgxN2Q4Ny04MzBlODJhZC04NWVkN2FjZC1kN2EwNmFkMQ==, ActorId: [1:7593707979978438264:2523], ActorState: ExecuteState, LegacyTraceId: 01kekxwd504g8kbv685sre6358, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 4123, MsgBus: 28545 2026-01-10T12:25:10.383090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707953289819818:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:10.383222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.612183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.614014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.614082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.701674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.758704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.764160Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707953289819778:2081] 1768047910377588 != 1768047910377591 2026-01-10T12:25:10.850722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.850763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.850770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.850867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.885526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:11.325231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.393319Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.410732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.553878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.725437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.793368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.819046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707966174723537:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.819169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.819581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707966174723547:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.819633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.158980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.188118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.218595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.248995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.281535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.312962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.345476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.417179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.494017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970469691714:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.494096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.494493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970469691720:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.494549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970469691719:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.494653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.498703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.508542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707970469691723:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.574319Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707970469691774:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:15.383701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707953289819818:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:15.383778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.420610Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OGQwMDVjZDEtZjM3YjVjODMtZmZjZTdiMy0yMTc3Y2Y2Zg==, ActorId: [1:7593707979059626661:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwd7810j14jm072j03262, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> DataShardTxOrder::RandomPointsAndRanges [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink >> KqpBatchDelete::ManyPartitions_2 >> KqpBatchUpdate::NotIdempotent >> KqpBatchDelete::Returning [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> KqpBatchDelete::TableNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 24569, MsgBus: 63179 2026-01-10T12:25:11.450312Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707959208544098:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:11.451876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:11.763426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:11.767833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:11.767937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:11.818646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:11.894999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:11.937629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-10T12:25:11.964052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:11.964110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:11.964134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:11.964221Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:12.437184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:12.446186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:12.473406Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.501449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.687853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.856436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.939648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.876894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707972093447811:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.877020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.877945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707972093447821:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.878000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.185142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.216195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.255885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.293653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.331002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.367833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.408167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.478749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.578086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707976388415989:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.578181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.578457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707976388415995:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.578831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707976388415994:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.578866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.583289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:15.598632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707976388415998:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:15.683860Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707976388416049:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:16.450617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707959208544098:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:16.450694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:17.492604Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzRjMjBmNy02NDA1NTJlYS00NzQ2Y2FiNy0zMzhhOTdmMg==, ActorId: [1:7593707984978350936:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwe9pbzq7tand76wb0z3a, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates >> HttpRequest::ProbeBaseStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 18021, MsgBus: 18545 2026-01-10T12:25:12.155107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707963794964798:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:12.155201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:12.446943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:12.467559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:12.467678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:12.525825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:12.534410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:12.564197Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707963794964774:2081] 1768047912154147 != 1768047912154150 2026-01-10T12:25:12.641324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:12.641391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2026-01-10T12:25:12.641407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:12.641494Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:12.718045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:13.115511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:13.170658Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:13.183351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.299549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.471373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.536113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.606182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707976679868538:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.606308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.606905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707976679868548:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.606986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.914443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.946731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.978047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.009495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.043665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.081388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.116287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.193750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.271555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707980974836722:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.271642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.271999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707980974836728:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.272047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.272117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707980974836727:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.276150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:16.288587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707980974836731:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:16.394028Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707980974836782:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:17.155377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707963794964798:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:17.155455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:18.076554Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707989564771676:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2026-01-10T12:25:18.076985Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZWJlYjQ0ZDktYTIzZDBkYTQtZGY3ZWEwNTYtZGMzMGUwZjc=, ActorId: [1:7593707989564771667:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwez8fmsy4199eg4btcnq, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 7482, MsgBus: 5226 2026-01-10T12:25:14.643125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707971423102911:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.643208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:14.659287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:14.968161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:14.993479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:14.993627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:15.018134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:15.080501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:15.082108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707971423102705:2081] 1768047914604835 != 1768047914604838 2026-01-10T12:25:15.154729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:15.168673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:15.168696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:15.168703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:15.168779Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:15.584339Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:15.592334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:15.642411Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:17.904128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707984308005481:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.904186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707984308005489:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.904227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.904579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707984308005496:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.904663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.909307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:17.924279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707984308005495:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:18.044435Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707988602972844:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:18.326767Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707988602972861:2334], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiDeleteTable!
:2:35: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:18.328748Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OGUwMmI0MGEtNjJjMzBhMmEtZDRmZmFkODctY2U3ZWQ5MjM=, ActorId: [1:7593707984308005477:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxwetd733rvavyvh34sa73, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 35 } message: "At function: KiDeleteTable!" end_position { row: 2 column: 35 } severity: 1 issues { position { row: 2 column: 35 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 35 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:18.451466Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707988602972879:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:41: Error: At function: KiDeleteTable!
:3:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:18.451971Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OGUwMmI0MGEtNjJjMzBhMmEtZDRmZmFkODctY2U3ZWQ5MjM=, ActorId: [1:7593707984308005477:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxwf800r5q61gb4zcrjvm4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 41 } message: "At function: KiDeleteTable!" end_position { row: 3 column: 41 } severity: 1 issues { position { row: 3 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2026-01-10T12:23:46.124256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707592112472536:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:46.124356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:23:46.385226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:23:46.432273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:46.432405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:46.483679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:46.549037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:46.586665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:23:46.752624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:23:46.752661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:23:46.752668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:23:46.752731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:23:47.099140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:23:47.124427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:23:47.134810Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:23:48.934077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707600702408400:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.934193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.934464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707600702408410:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:48.934511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.275833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/Dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-10T12:23:49.276037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:23:49.276123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976710658:1, at schemeshard: 72057594046644480 2026-01-10T12:23:49.277863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:23:49.277897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:23:49.279626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2026-01-10T12:23:49.345002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047829387, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:23:49.433005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:0 2026-01-10T12:23:49.433073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:1 2026-01-10T12:23:49.446251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604997375947:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.446339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.446546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707604997375949:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.446606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:23:49.467153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2026-01-10T12:23:49.467692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:23:49.467728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:23:49.472858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2026-01-10T12:23:49.484607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768047829527, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:23:49.491424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2026-01-10T12:23:51.124212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707592112472536:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:23:51.124314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2026-01-10T12:23:59.475019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-10T12:23:59.482303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-10T12:23:59.482360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:23:59.577404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts 
is done, operation id: 281474976715693:0 2026-01-10T12:23:59.616341Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-10T12:23:59.635307Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-10T12:24:01.366264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:24:01.366291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded partitions 1 2026-01-10T12:24:01.586665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_move_table.cpp:668: TMoveTable Propose, from: /Root/Dir/Foo, to: /Root/Dir/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-10T12:24:01.587597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:24:01.596772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAc ... 4046644480, LocalPathId: 40] was 4 2026-01-10T12:25:18.177965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 2026-01-10T12:25:18.177980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7593707987249469367:2769] 2026-01-10T12:25:18.178786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707647947049325 RawX2: 4503603922340183 } TabletId: 72075186224037890 State: 4 2026-01-10T12:25:18.178856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:25:18.180364Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037892 parts [ [72075186224037890:1:115:1:12288:11304:0] ] return ack processed 2026-01-10T12:25:18.180407Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:25:18.180511Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-10T12:25:18.180573Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2026-01-10T12:25:18.182119Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:25:18.182498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7593707987249469437:2774], serverId# [1:7593707987249469444:5093], sessionId# [0:0:0] 2026-01-10T12:25:18.182674Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707970069599762 RawX2: 4503603922340527 } TabletId: 72075186224037891 State: 4 2026-01-10T12:25:18.182758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:25:18.183633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-10T12:25:18.183720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-10T12:25:18.183752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-10T12:25:18.183840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:25:18.183880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:25:18.184219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-10T12:25:18.186153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:25:18.186183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:25:18.186211Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:25:18.186260Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-01-10T12:25:18.186344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-10T12:25:18.186777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707970069599759 RawX2: 4503603922340526 } TabletId: 72075186224037892 State: 4 2026-01-10T12:25:18.186830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:25:18.187039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7593707970069599759 RawX2: 4503603922340526 } TabletId: 72075186224037892 State: 4 2026-01-10T12:25:18.187077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-10T12:25:18.187214Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 
72075186224037890 reason = ReasonStop 2026-01-10T12:25:18.187217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-10T12:25:18.187465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-10T12:25:18.187741Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-10T12:25:18.188117Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-10T12:25:18.188200Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-10T12:25:18.189785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:25:18.189834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:25:18.190168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-10T12:25:18.190399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-10T12:25:18.190412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-10T12:25:18.190426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-10T12:25:18.190489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-10T12:25:18.190542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-10T12:25:18.190643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-10T12:25:18.190852Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-10T12:25:18.190892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-10T12:25:18.191599Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-10T12:25:18.191688Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-10T12:25:18.191732Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-10T12:25:18.193194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-10T12:25:18.193218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 
72075186224037891 2026-01-10T12:25:18.193342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:25:18.193574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-10T12:25:18.193806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-10T12:25:18.193969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-10T12:25:18.193988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-10T12:25:18.194034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-10T12:25:18.196266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:25:18.196292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-10T12:25:18.196392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-10T12:25:18.196420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-10T12:25:18.198380Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-10T12:25:18.198861Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-10T12:25:18.199058Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-10T12:25:18.199138Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 |94.6%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates >> KqpBatchUpdate::UpdateOn [GOOD] |94.6%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStats [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:03.095339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:03.202765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:03.210375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:03.210779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:03.210865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:03.686098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:03.790286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:03.790420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:03.829864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:03.927163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:04.676626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:04.678223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:04.678275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:04.678314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:04.678495Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:04.738128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:05.348283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:08.681449Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:08.681543Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:08.693065Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:08.722321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:08.722417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:08.763932Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:08.769701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:08.962382Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:08.962502Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:08.964574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.965328Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.966207Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.966782Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.966918Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.967184Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.967315Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.967474Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.967607Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:08.984939Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.296161Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:09.336760Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:09.336883Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:09.392142Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:09.392694Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:09.392973Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:09.393051Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:09.393111Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:09.393176Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:09.393241Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:09.393302Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:09.393886Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:09.415923Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:09.416038Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:09.443962Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2175:2625] 2026-01-10T12:24:09.445479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2175:2625], schemeshard id = 72075186224037897 2026-01-10T12:24:09.515941Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2653] 2026-01-10T12:24:09.520830Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:09.550376Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Describe result: PathErrorUnknown 2026-01-10T12:24:09.550452Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Creating table 2026-01-10T12:24:09.550533Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:09.557441Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2315:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:09.564941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:09.573805Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:09.573934Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:09.592731Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:09.776356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:09.830511Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:10.254171Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:10.384690Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:10.384768Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2665] Owner: [2:2249:2664]. Column diff is empty, finishing 2026-01-10T12:24:11.128569Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... request id = 55, ReplyToActorId = [2:5572:4600], StatRequests.size() = 1 2026-01-10T12:25:02.547807Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:5605:4614]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:02.548248Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-10T12:25:02.548305Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:5605:4614], StatRequests.size() = 1 2026-01-10T12:25:03.393618Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:03.563907Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:25:03.563977Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:03.564009Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:03.564037Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:25:04.037107Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:5645:4633]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:04.037364Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-10T12:25:04.037393Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:5645:4633], StatRequests.size() = 1 2026-01-10T12:25:05.389306Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:5677:4647]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:05.389593Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-10T12:25:05.389629Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:5677:4647], StatRequests.size() = 1 
2026-01-10T12:25:06.597168Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:06.597427Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-10T12:25:06.597617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-10T12:25:06.631046Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:25:06.631137Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:06.631435Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:25:06.646023Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:07.671904Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:5711:4663]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:07.672265Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-10T12:25:07.672321Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:5711:4663], StatRequests.size() = 1 2026-01-10T12:25:09.133834Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:5754:4689]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:09.134147Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-10T12:25:09.134189Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:5754:4689], StatRequests.size() = 1 2026-01-10T12:25:10.241423Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:5787:4703]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:10.241772Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-10T12:25:10.241822Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:5787:4703], StatRequests.size() = 1 2026-01-10T12:25:11.090713Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:11.510402Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:5822:4719]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:11.510764Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-10T12:25:11.510816Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:5822:4719], StatRequests.size() = 1 2026-01-10T12:25:12.662667Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:5855:4735]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:12.662911Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 
2026-01-10T12:25:12.662943Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:5855:4735], StatRequests.size() = 1 2026-01-10T12:25:13.474085Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:13.474267Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 2026-01-10T12:25:13.474354Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-10T12:25:13.507492Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:13.507581Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:13.507886Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-10T12:25:13.521894Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:13.799490Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:5888:4751]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:13.799838Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-10T12:25:13.799890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:5888:4751], StatRequests.size() = 1 2026-01-10T12:25:14.815254Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 65 ], ReplyToActorId[ [2:5921:4767]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:14.815601Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 65 ] 2026-01-10T12:25:14.815657Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 65, ReplyToActorId = [2:5921:4767], StatRequests.size() = 1 2026-01-10T12:25:15.721865Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 66 ], ReplyToActorId[ [2:5954:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:15.722143Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 66 ] 2026-01-10T12:25:15.722173Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 66, ReplyToActorId = [2:5954:4782], StatRequests.size() = 1 2026-01-10T12:25:16.438909Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:16.843316Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 67 ], ReplyToActorId[ [2:5994:4799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:16.843641Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 67 ] 2026-01-10T12:25:16.843681Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 67, ReplyToActorId = [2:5994:4799], StatRequests.size() = 1 2026-01-10T12:25:17.985279Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 68 ], ReplyToActorId[ [2:6028:4813]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:17.985581Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 68 ] 2026-01-10T12:25:17.985629Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 68, ReplyToActorId = [2:6028:4813], StatRequests.size() = 1 2026-01-10T12:25:18.825540Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:18.825831Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2026-01-10T12:25:18.825959Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-10T12:25:18.850793Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:18.850882Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:18.851248Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-10T12:25:18.871280Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:19.252336Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 69 ], ReplyToActorId[ [2:6064:4831]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:19.252669Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 69 ] 2026-01-10T12:25:19.252722Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 69, ReplyToActorId = [2:6064:4831], StatRequests.size() = 1 2026-01-10T12:25:19.253749Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 70 ], ReplyToActorId[ [2:6067:4834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:19.253987Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 70 ] 2026-01-10T12:25:19.254046Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 70, ReplyToActorId = [2:6067:4834], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11272 }' |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2026-01-10T12:22:30.080977Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:22:30.082085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:22:30.082142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:22:30.091642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:22:30.091959Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 
2026-01-10T12:22:30.092203Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:22:30.138068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:22:30.145727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:22:30.146341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:22:30.148264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:22:30.148337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:22:30.148449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:22:30.148835Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:22:30.148926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:22:30.149020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:22:30.236186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:22:30.272678Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:22:30.272876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:22:30.273003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:22:30.273066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:22:30.273108Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:22:30.273145Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:30.273400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.273473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.273787Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:22:30.273910Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:22:30.273975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:30.274024Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:22:30.274068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:22:30.274106Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:22:30.274156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no 
ready operations at 9437184 2026-01-10T12:22:30.274194Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:22:30.274240Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:22:30.274337Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.274374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.274455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:22:30.282135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:22:30.282228Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:22:30.282341Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:22:30.282533Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:22:30.282600Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:22:30.282665Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:22:30.282724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:22:30.282763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:22:30.282805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:22:30.282847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:30.283142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:22:30.283192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:22:30.283233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:22:30.283267Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:30.283338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:22:30.283371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 
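
The trace above walks a single scheme transaction through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, and then WaitForPlan in the entries that follow), each unit reporting a status such as Executed, DelayComplete, or "not ready". As a minimal sketch of that control flow only — a deliberately simplified model, not YDB's actual classes — the following C++ toy reproduces the pattern seen in the log: a unit either finishes inside the Execute phase or defers part of its work to the Complete phase, and an operation parks on a unit that is not ready until an external event (here, the plan step) arrives. Unit names, statuses, and printed messages are borrowed from the log purely for illustration.

```cpp
// Simplified, illustrative model of the execution-unit pipeline visible in the
// trace above. This is NOT YDB's actual code; names and statuses are copied
// from the log only to show the control flow.
#include <cstdio>
#include <functional>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;          // runs inside the Execute phase
    std::function<void()> Complete = nullptr;  // runs later if work was deferred
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; },
         [] { std::puts("Complete execution on unit StoreSchemeTx"); }},
        {"FinishPropose", [] { return EStatus::DelayComplete; },
         [] { std::puts("Complete execution on unit FinishPropose"); }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},  // waits for the plan step
    };

    std::vector<const TUnit*> delayed;
    for (const auto& unit : plan) {
        std::printf("Trying to execute on unit %s\n", unit.Name.c_str());
        EStatus st = unit.Execute();
        if (st == EStatus::NotReady) {
            std::printf("Operation is not ready to execute on unit %s\n", unit.Name.c_str());
            break;  // the operation stays parked until an external event arrives
        }
        if (st == EStatus::DelayComplete) {
            delayed.push_back(&unit);
        }
        std::printf("Advance execution plan, executing on unit %s\n", unit.Name.c_str());
    }
    // Deferred work runs once the local transaction commits (the Complete phase).
    for (const TUnit* unit : delayed) {
        unit->Complete();
    }
}
```

In the log this deferral surfaces as the later "Complete execution for [0:1] at 9437184 on unit StoreSchemeTx" and "... on unit FinishPropose" entries, emitted once TTxProposeTransactionBase::Complete runs.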
2026-01-10T12:22:30.283406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:22:30.283442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:30.283473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:22:30.295836Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:22:30.295917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:22:30.295954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:22:30.296018Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:22:30.296125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:22:30.296742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.296825Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:22:30.296881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:22:30.296981Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:22:30.297023Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:22:30.297159Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:22:30.297204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:22:30.297243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:22:30.297282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:22:30.301511Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:22:30.301591Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:22:30.301833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.301876Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:22:30.301943Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:22:30.301994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:22:30.302031Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:22:30.302076Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:22:30.302155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... ions 2026-01-10T12:25:15.567320Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437184 is Executed 2026-01-10T12:25:15.567349Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2026-01-10T12:25:15.567383Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437184 has finished 2026-01-10T12:25:15.567421Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:25:15.567455Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:25:15.567489Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:25:15.567525Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:25:15.595054Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:25:15.595140Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:402] at 9437184 on unit CompleteOperation 2026-01-10T12:25:15.595213Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 402] from 9437184 at tablet 9437184 send result to client [4:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-01-10T12:25:15.595293Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-01-10T12:25:15.595337Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:25:15.595637Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:25:15.595674Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2026-01-10T12:25:15.595713Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2026-01-10T12:25:15.595773Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:25:15.595797Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2026-01-10T12:25:15.595840Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:25:15.595895Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 
9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-01-10T12:25:15.595924Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:25:15.596906Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:238:2230], Recipient [4:459:2401]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-01-10T12:25:15.596967Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:25:15.597007Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2026-01-10T12:25:15.597476Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:238:2230], Recipient [4:459:2401]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-01-10T12:25:15.597518Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:25:15.597549Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2026-01-10T12:25:15.598293Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [4:238:2230], Recipient [4:348:2315]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-01-10T12:25:15.598341Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:25:15.598380Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437185 source 9437184 dest 9437185 producer 9437184 txId 403 2026-01-10T12:25:15.598465Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-01-10T12:25:15.598518Z node 4 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2026-01-10T12:25:15.598610Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437185 2026-01-10T12:25:15.598935Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [4:348:2315], Recipient [4:348:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:15.598979Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:25:15.599029Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-10T12:25:15.599067Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:25:15.599108Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2026-01-10T12:25:15.599143Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit 
LoadAndWaitInRS 2026-01-10T12:25:15.599182Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-10T12:25:15.599216Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2026-01-10T12:25:15.599252Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit BlockFailPoint 2026-01-10T12:25:15.599282Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit BlockFailPoint 2026-01-10T12:25:15.599312Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-10T12:25:15.599335Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit BlockFailPoint 2026-01-10T12:25:15.599359Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2026-01-10T12:25:15.599384Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2026-01-10T12:25:15.601697Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2026-01-10T12:25:15.601774Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 5, NSelectRange: 3, NUpdateRow: 3, NEraseRow: 0, SelectRowRows: 5, SelectRowBytes: 40, SelectRangeRows: 55, SelectRangeBytes: 440, UpdateRowBytes: 21, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:25:15.601845Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2026-01-10T12:25:15.601878Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2026-01-10T12:25:15.601910Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2026-01-10T12:25:15.601945Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2026-01-10T12:25:15.602207Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is DelayComplete 2026-01-10T12:25:15.602242Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2026-01-10T12:25:15.602277Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2026-01-10T12:25:15.602313Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2026-01-10T12:25:15.602351Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-01-10T12:25:15.602379Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2026-01-10T12:25:15.602429Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437185 has finished 2026-01-10T12:25:15.602464Z node 4 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:25:15.602493Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:25:15.602536Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-10T12:25:15.602564Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-10T12:25:15.628289Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:25:15.628387Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2026-01-10T12:25:15.628470Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-01-10T12:25:15.628566Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-01-10T12:25:15.628621Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-10T12:25:15.629605Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:348:2315], Recipient [4:238:2230]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-01-10T12:25:15.629668Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:25:15.629716Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] >> KqpBatchDelete::ManyPartitions_1 >> KqpBatchUpdate::UnknownColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 21673, MsgBus: 13301 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:04.902077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:04.987015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:04.994879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:04.995248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:04.995324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:05.352779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:05.352911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:05.358857Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047902530126 != 1768047902530129 2026-01-10T12:25:05.371051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:05.418933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:05.543540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:05.856940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:05.857038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:05.857090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:05.857741Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:05.870428Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:06.149603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:06.269847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.555767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:06.876135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.137110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.942967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.943534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.944438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.944549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:07.972543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.184314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.423513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.697804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.933923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.193690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.484730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.790271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.171903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:10.172053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:10.172533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:10.172611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:10.172762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:10.179716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:10.343324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:10.419887Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:12.199248Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037899 2026-01-10T12:25:12.199433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:1391:3127], Recipient [1:1396:3131]: {TEvReadSet step# 8501 txid# 281474976715672 TabletSource# 72075186224037900 ... r.cpp:2421: 72075186224037927 read iterator# {[1:3766:4930], 0} finished in read 2026-01-10T12:25:19.794397Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-10T12:25:19.794427Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit ExecuteRead 2026-01-10T12:25:19.794455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:18] at 72075186224037927 to execution unit CompletedOperations 2026-01-10T12:25:19.794484Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2026-01-10T12:25:19.794542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-10T12:25:19.794570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-10T12:25:19.794597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-10T12:25:19.794629Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-10T12:25:19.794713Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-10T12:25:19.794781Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-10T12:25:19.794827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-10T12:25:19.794889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:19.794915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-10T12:25:19.794939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-10T12:25:19.794967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-10T12:25:19.795004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2026-01-10T12:25:19.795035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 
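
The TEvRead requests in the trace above carry a VectorTopK block (metric: DISTANCE_COSINE, vector_type: VECTOR_TYPE_FLOAT, Limit: 3), which is what the KqpKnn::VectorSearchKnnPushdown tests exercise: the top-K-by-distance selection is pushed down into the per-shard read rather than being evaluated only in the query layer. As a rough sketch of what such a per-shard evaluation amounts to — not the datashard's actual implementation — the self-contained C++ example below scans rows once and keeps the Limit closest rows by cosine distance in a bounded max-heap; the row layout, the distance definition (1 minus cosine similarity), and the heap-based selection are assumptions made for illustration.

```cpp
// Illustrative sketch only: a per-shard "top K by cosine distance" scan.
// Not YDB's implementation; data layout and distance handling are assumed.
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <queue>
#include <utility>
#include <vector>

struct TRow {
    uint64_t Key;
    std::vector<float> Embedding;
};

// Cosine distance = 1 - cos(angle); smaller means closer.
static float CosineDistance(const std::vector<float>& a, const std::vector<float>& b) {
    float dot = 0.f, na = 0.f, nb = 0.f;
    for (size_t i = 0; i < a.size(); ++i) {
        dot += a[i] * b[i];
        na  += a[i] * a[i];
        nb  += b[i] * b[i];
    }
    if (na == 0.f || nb == 0.f) {
        return 1.f;  // treat zero vectors as maximally distant (an assumption)
    }
    return 1.f - dot / (std::sqrt(na) * std::sqrt(nb));
}

// Keep only the `limit` closest rows while scanning the shard's rows once.
static std::vector<std::pair<float, uint64_t>> TopK(
        const std::vector<TRow>& rows,
        const std::vector<float>& target,
        size_t limit) {
    // Max-heap ordered by distance: the worst of the current best `limit`
    // rows sits on top and is evicted when a closer row is found.
    std::priority_queue<std::pair<float, uint64_t>> heap;
    for (const TRow& row : rows) {
        float d = CosineDistance(row.Embedding, target);
        if (heap.size() < limit) {
            heap.push({d, row.Key});
        } else if (d < heap.top().first) {
            heap.pop();
            heap.push({d, row.Key});
        }
    }
    std::vector<std::pair<float, uint64_t>> result;
    while (!heap.empty()) {
        result.push_back(heap.top());
        heap.pop();
    }
    std::sort(result.begin(), result.end());  // closest first
    return result;
}

int main() {
    std::vector<TRow> rows = {
        {1, {1.f, 0.f}}, {2, {0.f, 1.f}}, {3, {0.7f, 0.7f}}, {4, {-1.f, 0.f}},
    };
    auto best = TopK(rows, /*target=*/{1.f, 0.1f}, /*limit=*/3);
    (void)best;  // keys 1, 3, 2 in that order for this toy data
}
```

Keeping the candidate set in a max-heap of size Limit keeps the scan at O(n log Limit) regardless of how many rows the shard holds; in this particular run the shards return rowCount# 0, so the pushed-down reads produce empty results.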
2026-01-10T12:25:19.795061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-10T12:25:19.795088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-10T12:25:19.795111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-10T12:25:19.795231Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-10T12:25:19.795361Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9500/18446744073709551615 2026-01-10T12:25:19.795398Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3766:4930], 1} after executionsCount# 1 2026-01-10T12:25:19.795434Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3766:4930], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:25:19.795541Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3766:4930], 1} finished in read 2026-01-10T12:25:19.795595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:19.795623Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-10T12:25:19.795649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-10T12:25:19.795677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-10T12:25:19.795716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:19.795740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-10T12:25:19.795764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-10T12:25:19.795792Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-10T12:25:19.795853Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2026-01-10T12:25:19.796036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3766:4930], Recipient [1:3183:4482]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9500 TxId: 
18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-10T12:25:19.796208Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-10T12:25:19.796254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-10T12:25:19.796318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:19.796347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-10T12:25:19.796373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-10T12:25:19.796403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-10T12:25:19.796442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-10T12:25:19.796474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:19.796502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-10T12:25:19.796525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-10T12:25:19.796552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-10T12:25:19.796655Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-10T12:25:19.796783Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9500/18446744073709551615 2026-01-10T12:25:19.796817Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3766:4930], 2} after executionsCount# 1 2026-01-10T12:25:19.796850Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3766:4930], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:25:19.796957Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3766:4930], 2} finished in read 2026-01-10T12:25:19.797007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:19.797038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 
72075186224037929 executing on unit ExecuteRead 2026-01-10T12:25:19.797065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-10T12:25:19.797092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-10T12:25:19.797129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:19.797155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-10T12:25:19.797184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-10T12:25:19.797212Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-10T12:25:19.797274Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-10T12:25:19.798975Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3766:4930], Recipient [1:3173:4476]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:25:19.799036Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-10T12:25:19.799098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3766:4930], Recipient [1:3177:4478]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:25:19.799151Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-10T12:25:19.801296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3766:4930], Recipient [1:3183:4482]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-10T12:25:19.801350Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 5683, MsgBus: 1865 2026-01-10T12:25:14.319616Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707971692101314:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.320578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:14.564197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:14.566210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:14.566300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:14.568558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-10T12:25:14.702923Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:14.712609Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707971692101285:2081] 1768047914315374 != 1768047914315377 2026-01-10T12:25:14.816756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:14.816782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:14.816792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:14.816900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:14.851092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:15.247757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:15.316981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.329910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:15.460294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.635508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.707175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.650807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707984577005051:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.650920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.656898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707984577005061:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.656964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.986311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.020496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.052877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.098703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.136711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.178979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.228237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.302774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.384090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707988871973229:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.384182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.384253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707988871973234:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.384416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707988871973236:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.384472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.388100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:18.398130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707988871973238:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:18.489038Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707988871973289:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:19.320208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707971692101314:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:19.320288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:20.193978Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707997461908184:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with ON 2026-01-10T12:25:20.194362Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWY0ZmM3YzQtN2UyZjcwMTQtMTU3MDcxMjktZjkyNjFhMGM=, ActorId: [1:7593707997461908175:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwh1j710p9bphjbc4ehtn, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 >> KqpBatchUpdate::ColumnTable >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> KqpBatchDelete::UnknownColumn >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 13112, MsgBus: 62498 ... waiting for SysViewsRoster update finished 2026-01-10T12:25:06.404100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:06.498308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:25:06.505810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:25:06.506215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:06.506313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:25:06.780525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:06.780641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:06.785027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047903762955 != 1768047903762958 2026-01-10T12:25:06.798666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:06.846231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:06.966560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:25:07.288514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:07.288596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:07.288635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:07.289214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:07.301484Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:07.523677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:07.581012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:07.829241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.192017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:08.461908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.173455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.173820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.174525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1974:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.174702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:09.203789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.407308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.647621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:09.878539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.204984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.470988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:10.766110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.056518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.440014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2785:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.440195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.440562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.440634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.440779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:11.445973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:11.610667Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:11.683954Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2854:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:13.283563Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037891 2026-01-10T12:25:13.283821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:1089:2899], Recipient [1:2622:4041]: {TEvReadSet step# 8001 txid# 281474976715672 TabletSource# 72075186224037923 ... uted 2026-01-10T12:25:21.377294Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit ExecuteRead 2026-01-10T12:25:21.377322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:18] at 72075186224037927 to execution unit CompletedOperations 2026-01-10T12:25:21.377350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2026-01-10T12:25:21.377404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-10T12:25:21.377432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-10T12:25:21.377461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-10T12:25:21.377494Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-10T12:25:21.377595Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-10T12:25:21.377677Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-10T12:25:21.377731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-10T12:25:21.377798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:21.377824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-10T12:25:21.377850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-10T12:25:21.377875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-10T12:25:21.377912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2026-01-10T12:25:21.377946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:21.377971Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-10T12:25:21.377997Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-10T12:25:21.378021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-10T12:25:21.378147Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-10T12:25:21.378296Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-10T12:25:21.378336Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3758:4925], 1} after executionsCount# 1 2026-01-10T12:25:21.378375Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3758:4925], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:25:21.378520Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3758:4925], 1} finished in read 2026-01-10T12:25:21.378583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:21.378612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-10T12:25:21.378641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-10T12:25:21.378671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-10T12:25:21.378716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-10T12:25:21.378741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-10T12:25:21.378767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-10T12:25:21.378794Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-10T12:25:21.378870Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2026-01-10T12:25:21.379065Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3758:4925], Recipient [1:3177:4479]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 
1 2026-01-10T12:25:21.379241Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-10T12:25:21.379293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-10T12:25:21.379386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:21.379413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-10T12:25:21.379440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-10T12:25:21.379488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-10T12:25:21.379560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-10T12:25:21.379600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:21.379627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-10T12:25:21.379674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-10T12:25:21.379702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-10T12:25:21.379818Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-10T12:25:21.379962Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-10T12:25:21.379997Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3758:4925], 2} after executionsCount# 1 2026-01-10T12:25:21.380034Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3758:4925], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:25:21.380288Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3758:4925], 2} finished in read 2026-01-10T12:25:21.380347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:21.380387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2026-01-10T12:25:21.380415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-10T12:25:21.380443Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-10T12:25:21.380482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-10T12:25:21.380505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-10T12:25:21.380541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-10T12:25:21.380571Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-10T12:25:21.380654Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-10T12:25:21.382612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3758:4925], Recipient [1:3171:4475]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:25:21.382680Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-10T12:25:21.383998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3758:4925], Recipient [1:3174:4477]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-10T12:25:21.384061Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-10T12:25:21.384829Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3758:4925], Recipient [1:3177:4479]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-10T12:25:21.384880Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } 2026-01-10T12:25:21.413101Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:2066:3605]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 6430, MsgBus: 19151 2026-01-10T12:25:10.328132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707954368461291:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:10.332160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.562117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.562202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.582072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.611131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.633968Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.708975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.709028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.709038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.709162Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.897309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:11.128282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.179895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.332406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.337779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.474698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.542777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.439275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967253365010:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.439400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.439744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967253365020:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.439810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.734226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.765373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.798666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.829699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.880850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.924934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.002818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.122115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971548333189:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.122206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.122532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971548333194:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.122581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971548333195:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.122626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.127199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.139422Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707971548333198:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.209642Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707971548333249:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:15.328206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707954368461291:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:15.328287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.044771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.089565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.154625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.424163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 11635, MsgBus: 8402 2026-01-10T12:25:15.558152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707974401959281:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:15.560778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:15.811481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:15.859090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:15.859197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-10T12:25:15.913487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:15.915027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707974401959176:2081] 1768047915538138 != 1768047915538141 2026-01-10T12:25:15.933008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:16.074618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:16.074638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:16.074644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:16.074719Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:16.084715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:16.510411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:16.568263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.574311Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:16.709704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.876121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.944734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.901776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707987286862939:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.901887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.902218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707987286862949:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.902276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.198650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.236795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.269468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.308298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.343169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.390763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.434242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.505748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.593439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707991581831117:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.593507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.593726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707991581831122:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.593757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707991581831123:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.593780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:19.597372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:19.610564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707991581831126:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:19.678831Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707991581831177:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:20.557144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707974401959281:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:20.558674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:21.296511Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708000171766075:2537], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At lambda, At function: Coalesce
:4:41: Error: At function: ==
:4:27: Error: At function: Member
:4:27: Error: Member not found: UnknownColumn 2026-01-10T12:25:21.298895Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGFiNzg1ZTQtZDY2ZTIyMmQtZmZhNGYwNzMtNDUxMDIwM2M=, ActorId: [1:7593708000171766066:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwj2g9k4nkr5eenvm4w62, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At lambda, At function: Coalesce" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 4 column: 41 } message: "At function: ==" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 27 } message: "At function: Member" end_position { row: 4 column: 27 } severity: 1 issues { position { row: 4 column: 27 } message: "Member not found: UnknownColumn" end_position { row: 4 column: 27 } severity: 1 } } } } } tx_id# trace_id# 2026-01-10T12:25:21.359101Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708000171766084:2541], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:4:43: Error: At function: KiUpdateTable!
:4:43: Error: Column 'UnknownColumn' does not exist in table '/Root/Test'., code: 2017 2026-01-10T12:25:21.360507Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGFiNzg1ZTQtZDY2ZTIyMmQtZmZhNGYwNzMtNDUxMDIwM2M=, ActorId: [1:7593708000171766066:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwj54bw75wnmcwm0jzk77, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 43 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 43 } severity: 1 issues { position { row: 4 column: 43 } message: "Column \'UnknownColumn\' does not exist in table \'/Root/Test\'." end_position { row: 4 column: 43 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink |94.6%| [TA] $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::QueryStatsSimple [GOOD] |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> KqpBatchUpdate::NotIdempotent [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession >> TPQCachingProxyTest::MultipleSessions |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 26742, MsgBus: 11519 2026-01-10T12:25:18.240244Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707988811565499:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:18.242064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:18.481646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-10T12:25:18.481809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:18.484551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:18.539939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:18.559830Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:18.589129Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:25:18.589416Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:25:18.624757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:18.624780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:18.624787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:18.624884Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:18.787115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:19.103798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:19.184475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.251407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:19.355158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.521812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.600412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:21.653058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708001696469220:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:21.653177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:21.653738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708001696469230:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:21.653786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.009816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.056938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.089405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.119959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.148981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.209849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.240413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.286725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.361224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708005991437396:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.361300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.361608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708005991437401:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.361633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708005991437402:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.361670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.365781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:22.377470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708005991437405:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:22.446387Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708005991437456:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:23.237615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707988811565499:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:23.237719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:24.289336Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708014581372319:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:25:24.289888Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTQ2MDNkNjMtOTIxOTlkOTQtYTQxMzgzOGUtNDVlYmIxMDM=, ActorId: [1:7593708014581372310:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxwmzkd52q7dg8ew1d82d2, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 65534, MsgBus: 25099 2026-01-10T12:24:23.238801Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707751916047434:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.238927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.346019Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707750075969901:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.346622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.360833Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707753030712452:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:23.360886Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:23.588936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.590245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.607891Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.729585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.729699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.731647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.731684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.734180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:23.734270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:23.759340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.761890Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:23.774237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.790667Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:23.807754Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:23.824036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:23.828105Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:23.829756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.836741Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:23.958614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:23.958649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:23.958660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:23.958762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:24.138663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:24.246815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.351330Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:24.398958Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-10T12:24:24.654387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:24.788917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.091181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.257092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:25.335882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.241540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769095918784:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.241680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.242191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707769095918794:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.242258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:27.584202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.631857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.697768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.768542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.820314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.892161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:27.957969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:28.066590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB fir ... 
NodeConnected, NodeId 17 Cookie 17 2026-01-10T12:25:11.638225Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:11.638508Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:11.646541Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:11.771024Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:11.771056Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:11.771069Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:11.771193Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:11.945136Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:11.999558Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:12.285997Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.318071Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.365172Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.425830Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:12.454445Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.583357Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.744435Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.838070Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.256750Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7593707959009373290:2122];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:16.256880Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.318734Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7593707958001724473:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:16.318815Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:16.891225Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593707980484212001:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.891339Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.891916Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593707980484212010:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.891975Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.001207Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.185131Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.254291Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.341693Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.408643Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.493804Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.595372Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.713570Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.947531Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593707984779180435:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.947657Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.947886Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593707984779180440:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.947975Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593707984779180442:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.948029Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.953905Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:17.977751Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7593707984779180444:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:18.044534Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7593707989074147817:4760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:21.755411Z node 16 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047921746, txId: 281474976710675] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::MultipleSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 10235, MsgBus: 2623 2026-01-10T12:25:19.094932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707993493911299:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:19.094999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:19.159986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:19.478574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:19.478667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:19.480409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:19.565399Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707993493911271:2081] 1768047919093749 != 1768047919093752 2026-01-10T12:25:19.565907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:19.594742Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:19.669838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:19.669869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:19.669881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:19.670002Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:19.748348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:25:20.077669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:20.085305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:20.105194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:20.167106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:20.305728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:20.464141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:20.535718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.432020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708006378815037:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.432166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.432821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708006378815047:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.432894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.793786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.828093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.858593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.886163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.919907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.960279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.000329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.049364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.136997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708010673783217:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.137091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.137327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708010673783221:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.137339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708010673783223:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.137364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.140867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:23.153232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708010673783226:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:23.254560Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708010673783277:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:24.096570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707993493911299:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:24.096633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:24.729389Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708014968750876:2537], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-10T12:25:24.729828Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzkyYjI0ZDctOTlhNmUwNmUtY2M3Y2Q5YzUtMjJiNDJjZWU=, ActorId: [1:7593708014968750867:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwnehfgsysyf3hyvc96cn, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-10T12:25:24.752986Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708014968750880:2539], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-10T12:25:24.753629Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzkyYjI0ZDctOTlhNmUwNmUtY2M3Y2Q5YzUtMjJiNDJjZWU=, ActorId: [1:7593708014968750867:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwng019fv5nqg0q5qpga8, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-10T12:25:24.774344Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708014968750884:2541], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2026-01-10T12:25:24.775103Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzkyYjI0ZDctOTlhNmUwNmUtY2M3Y2Q5YzUtMjJiNDJjZWU=, ActorId: [1:7593708014968750867:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwngqeydjja503cvq0ra6, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 51 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 51 } severity: 1 } } tx_id# trace_id# |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] |94.6%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2026-01-10T12:25:26.615139Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:25:26.759272Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:25:26.760021Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:25:26.760151Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:25:26.760216Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:25:26.781115Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:26.781671Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-10T12:25:26.781788Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-10T12:25:26.781829Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-10T12:25:26.781912Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:293: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] |94.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2026-01-10T12:25:26.615089Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:25:26.758835Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:25:26.759710Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:25:26.759857Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:25:26.759924Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:25:26.780965Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:26.781664Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-10T12:25:26.781745Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2026-01-10T12:25:26.781883Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2026-01-10T12:25:26.615115Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:25:26.758834Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:25:26.759712Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:25:26.759886Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:25:26.759952Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:25:26.782892Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:26.782975Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-10T12:25:26.783064Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-10T12:25:26.783104Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2026-01-10T12:25:26.783134Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-10T12:25:26.783182Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2026-01-10T12:25:26.783225Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: 
session2:1 with generation 2 2026-01-10T12:25:26.783268Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2026-01-10T12:25:26.783305Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> TPQCachingProxyTest::TestPublishAndForget >> KqpBatchDelete::ColumnTable [GOOD] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 26228, MsgBus: 22545 2026-01-10T12:25:20.760323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707994558738412:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:20.761647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:20.788457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:21.073424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:21.073539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:21.158248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:21.227616Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:21.252226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707994558738357:2081] 1768047920741043 != 1768047920741046 2026-01-10T12:25:21.252313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:21.277255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:21.277289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:21.277296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:21.277394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:21.696621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:25:21.749088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:21.766853Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:21.885239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.042484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.120514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.100898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708011738609413:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.101017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.101364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708011738609423:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.101403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.440869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.474612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.517868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.548999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.584206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.619379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.655738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.708286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.786904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708011738610291:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.786995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.787119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708011738610296:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.787426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708011738610298:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.787531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.790774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:24.806491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708011738610299:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:24.903454Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708011738610353:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:25.757601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707994558738412:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:25.757678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 26201, MsgBus: 8715 2026-01-10T12:25:20.403487Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707995399830517:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:20.403549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:20.441353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:20.692441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:20.745721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:20.745853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:20.782201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:20.821918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707995399830492:2081] 1768047920401996 != 1768047920401999 2026-01-10T12:25:20.823712Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:20.972559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:20.972582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:20.972589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:20.972654Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:20.994148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:21.416483Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:21.484474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:21.540968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:21.695203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:21.870541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:21.936115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.948874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708008284734251:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.949036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.949611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708008284734261:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:23.949672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.294572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.338582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.418727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.444890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.473394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.514706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.550119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.597084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.671215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012579702431:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.671267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.671337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012579702436:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.671840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012579702438:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.671888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.674795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:24.685616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708012579702439:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:24.794649Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708012579702491:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:25.403583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707995399830517:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:25.403660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:26.692017Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708021169637361:2529], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:25:26.692519Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZGE0Y2NiMTctZTdmNWY5YzgtMjhiYzVlZDEtMTgxM2RiZTI=, ActorId: [1:7593708021169637350:2523], ActorState: ExecuteState, LegacyTraceId: 01kekxwqaxdsnv7wxqw2j4en0t, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 28666, MsgBus: 23445 2026-01-10T12:25:20.948350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707995811404242:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:20.948414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:21.289998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:21.290087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:21.368382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:21.381528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:21.404755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707995811404131:2081] 1768047920941021 != 1768047920941024 2026-01-10T12:25:21.423290Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:21.516699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:21.516720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:21.516728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:21.516857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:21.611044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:21.964254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:21.984398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:22.044288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.174426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.331351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.409835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.425785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012991275202:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.425904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.426182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012991275212:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.426215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.790545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.820182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.850952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.884385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.915274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.950147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.995165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.037585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.121893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017286243384:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.122047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.122468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017286243390:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.122502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017286243389:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.122577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.125899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:25.135686Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708017286243393:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:25.219411Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708017286243444:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:25.948046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707995811404242:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:25.948134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:26.928195Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708021581211011:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:25:26.928703Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZGUyYmFiMDgtNjc1YmNiMC1jYTI2ODE4ZC0xMmNhY2I5OQ==, ActorId: [1:7593708021581211002:2521], ActorState: ExecuteState, LegacyTraceId: 01kekxwqjh2a6p5eaggpyhq8az, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 15179, MsgBus: 17875 2026-01-10T12:25:21.113143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708000656143377:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:21.113203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:21.436216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:21.454140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:21.454245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:21.507544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:21.511143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:21.512855Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708000656143355:2081] 1768047921111939 != 1768047921111942 2026-01-10T12:25:21.613899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:21.617997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:21.618020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:21.618027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:21.618134Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:22.100047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:22.127616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:22.162880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.325954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.492336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.568901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.473090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708013541047120:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.473215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.473619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708013541047130:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.473668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.826157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.861931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.894891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.922954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.949662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.987009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.025291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.080859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.172444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017836015298:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.172559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.172884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017836015304:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.172897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708017836015303:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.172967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.177568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:25.190364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708017836015307:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:25.270682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708017836015358:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:26.113516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708000656143377:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:26.113575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:26.849286Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708022130982957:2537], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-10T12:25:26.849758Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmU1MGFiNGUtZjgzY2U4ZWItOGRkZWUzMWUtNmRhY2VhZTA=, ActorId: [1:7593708022130982948:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwqft6g2vezdz15rf0ayj, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2026-01-10T12:25:28.370336Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:25:28.452752Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:25:28.452837Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:25:28.452964Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:25:28.453044Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:25:28.470952Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:28.471103Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-10T12:25:28.471197Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-10T12:25:28.471231Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-10T12:25:28.471315Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpBatchDelete::UnknownColumn [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] |94.7%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 9877, MsgBus: 61422 2026-01-10T12:25:20.616259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707995047849745:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:20.616468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:20.887056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:20.887170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:20.940594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:20.948241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:20.952328Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707995047849642:2081] 1768047920609320 != 1768047920609323 2026-01-10T12:25:20.972551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:21.216194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:21.221230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:21.221252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:21.221265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:21.221365Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:21.622329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:21.688554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:24.091562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012227719720:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.091651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012227719731:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.091726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.095657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708012227719735:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.095769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:24.096714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:24.107835Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708012227719734:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:24.253968Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708012227719787:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:24.614531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:25:25.175610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:25:25.175610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:25:25.175819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:25:25.176121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:25:25.176224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:25:25.176306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:25:25.176321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:25:25.176406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:25:25.176478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:25:25.176511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:25:25.176573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:25:25.176615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:25:25.176626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:25:25.176703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:25:25.176728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:25:25.176763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:25:25.176838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:25:25.176841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:25:25.176951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:25:25.176956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:25:25.177010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7593708012227720354:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:25:25.177078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037947;self_id=[1:7593708012227720311:2341];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS ... pp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.938354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.938420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.938447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.939022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.939058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.939069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.944502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.950958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.958800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966772Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.966794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.975565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.982135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.982201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.982216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.983453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.983490Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.983518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.990727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.990800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:26.990815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:27.647740Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODIwYzg1MjMtZmUxMTM0OWUtYmE5NTg5NDEtNWI4ZWE5YzM=, ActorId: [1:7593708012227719716:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxwr1n98kdy72jwfja8534, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# >> TTxAllocatorClientTest::ZeroRange >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::Boot >> TTxAllocatorClientTest::InitiatingRequest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpBatchUpdate::ColumnTable [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] >> TTxAllocatorClientTest::InitiatingRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2026-01-10T12:25:29.713222Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:25:29.773610Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-10T12:25:29.773664Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-10T12:25:29.773755Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:25:29.773805Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-10T12:25:29.785507Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:29.785594Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with 
generation 2 2026-01-10T12:25:29.785670Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-10T12:25:29.785713Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2026-01-10T12:25:29.785758Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2026-01-10T12:25:29.785791Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2026-01-10T12:25:29.785819Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-10T12:25:29.785874Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2026-01-10T12:25:30.791751Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:25:30.794969Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:25:30.797469Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:25:30.812113Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.815838Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:25:30.835699Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835888Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.836017Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:25:30.836242Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.836322Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.836436Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:25:30.836548Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:25:30.841557Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-10T12:25:30.847633Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847706Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847804Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-10T12:25:30.847866Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 2026-01-10T12:25:30.848050Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.848219Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.848378Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.848583Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.848750Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-10T12:25:30.851110Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.851213Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.851342Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2026-01-10T12:25:30.851388Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 5000 to# 10000 2026-01-10T12:25:30.851587Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.851769Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.851958Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.852304Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-10T12:25:30.852483Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-10T12:25:30.853071Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.853143Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.853246Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2026-01-10T12:25:30.853295Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 10000 to# 15000 2026-01-10T12:25:30.853515Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2026-01-10T12:25:30.791575Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:25:30.794963Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:25:30.797477Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:25:30.812016Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.815827Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:25:30.834583Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.834733Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.834859Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:25:30.835003Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835042Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835179Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:25:30.835328Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:25:30.841500Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-10T12:25:30.847635Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847703Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847811Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-10T12:25:30.847873Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2026-01-10T12:25:30.793645Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:25:30.794981Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:25:30.797466Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:25:30.812055Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.815818Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:25:30.833394Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.833531Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.833696Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:25:30.833900Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.833945Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.834077Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:25:30.834218Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 16172, MsgBus: 25449 2026-01-10T12:25:23.481459Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708010305762758:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:23.481548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:23.718405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:23.744194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:23.744298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:23.771041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:23.813341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:23.881157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:24.009136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:24.009167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:24.009188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:24.009269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:24.444562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:24.493816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:24.523684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.641366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.788409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.853612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.766050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708023190666488:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.766188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.771734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708023190666498:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.771841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.121283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.156583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.187695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.214575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.243050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.279299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.311378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.372697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.449627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708027485634666:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.449708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.449774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708027485634671:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.449873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708027485634673:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.449916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.453571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:27.463773Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708027485634675:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:27.541809Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708027485634728:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:28.481648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708010305762758:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:28.481709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23091, MsgBus: 10188 2026-01-10T12:25:23.209198Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708008687785082:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:23.209265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:23.429842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:23.458049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:23.458175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:23.506187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:23.529092Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:23.536234Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708008687784943:2081] 1768047923203374 != 1768047923203377 2026-01-10T12:25:23.591927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:23.591978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:23.591992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:23.592069Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:23.682174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:23.971165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:23.981368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:25:24.038419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.179625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.275839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:24.321640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.389290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.384589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708021572688719:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.384710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.385244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708021572688729:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.385321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.787228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.821934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.857274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.893746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.923949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.974454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.014346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.050848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:27.122850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708025867656897:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.122980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.123112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708025867656902:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.123160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708025867656903:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.123209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:27.127490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:27.141500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708025867656906:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:27.238634Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708025867656957:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:28.209016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708008687785082:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:28.209115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:28.801860Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708030162624558:2537], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At lambda, At function: Coalesce
:3:37: Error: At function: ==
:3:23: Error: At function: Member
:3:23: Error: Member not found: UnknownColumn 2026-01-10T12:25:28.802480Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTM3ZTUyNzgtYTI5YTc1ZTgtNTNmYmU5YjItZGM2OWNkYjA=, ActorId: [1:7593708030162624549:2531], ActorState: ExecuteState, LegacyTraceId: 01kekxwsde5mfa3dgmzy87bn07, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 31 } message: "At lambda, At function: Coalesce" end_position { row: 2 column: 31 } severity: 1 issues { position { row: 3 column: 37 } message: "At function: ==" end_position { row: 3 column: 37 } severity: 1 issues { position { row: 3 column: 23 } message: "At function: Member" end_position { row: 3 column: 23 } severity: 1 issues { position { row: 3 column: 23 } message: "Member not found: UnknownColumn" end_position { row: 3 column: 23 } severity: 1 } } } } } tx_id# trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 12719, MsgBus: 4835 2026-01-10T12:25:23.042157Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708009171595902:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:23.046267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:23.347718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:23.347825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:23.350670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:23.445941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:23.450759Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708009171595866:2081] 1768047923038520 != 1768047923038523 2026-01-10T12:25:23.456623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:23.533016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:23.533042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:23.533050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:23.533173Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:23.700197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:23.963299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:24.051218Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:26.228777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022056498647:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.228947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.229364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022056498659:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.229448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022056498660:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.229505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.234292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:26.248658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708022056498663:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:26.395019Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708022056498714:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:26.707165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:25:27.240355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:25:27.240526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:25:27.240752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:25:27.240822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:25:27.240890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:25:27.240952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:25:27.241008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:25:27.241073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:25:27.241179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:25:27.241306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:25:27.241406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:25:27.241471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:25:27.241528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7593708022056499285:2341];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:25:27.273728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:25:27.273813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:25:27.274019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:25:27.274107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:25:27.274216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:25:27.274311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:25:27.274397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:25:27.274501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:25:27.274584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037941;self_id=[1:7593708022056499277:2337];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... pp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.086175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.086264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.086279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.088012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.088068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.088098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.091867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.091918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.091931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.095501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.095574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.095608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.097864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.097904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.097912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.103446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.108371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.108430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.108443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.110459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.110509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.110521Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.113437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.113491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.113503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.118700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.123993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.124065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.124091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.126166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.126223Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.126245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.130345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.130401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.130425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-10T12:25:29.772390Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzZjY2U4OGUtM2M3YjkyOTAtNGFkNTI2OWUtNzlkMWZiNWQ=, ActorId: [1:7593708022056498618:2320], ActorState: ExecuteState, LegacyTraceId: 01kekxwt1z911328dtp4bvxy39, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 16658, MsgBus: 8597 2026-01-10T12:25:11.891547Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707958747119699:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:11.891838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:11.924355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:12.138893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:12.139175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:12.141814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:12.160061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:12.210545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:12.225737Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707958747119677:2081] 1768047911890653 != 1768047911890656 2026-01-10T12:25:12.271597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:12.271640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:12.271659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:12.271785Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:12.410048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:12.715688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:12.724785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:12.779359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:12.902264Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.921215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.105341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.174522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.229697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707975926990731:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.229892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.230270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707975926990741:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.230325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.526063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.560941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.600364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.634607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.667909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.712494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.750485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.804058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.887267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707975926991610:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.887322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.887662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707975926991615:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.887714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707975926991616:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.888003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:15.892233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:15.903995Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707975926991619:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:15.977204Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707975926991672:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:16.892233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707958747119699:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:16.892298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... DB, gRPC: 61097, MsgBus: 4716 2026-01-10T12:25:22.946928Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708004231775492:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:22.946998Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:22.984151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:23.055156Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:23.058610Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708004231775465:2081] 1768047922945753 != 1768047922945756 2026-01-10T12:25:23.085738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:23.085809Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:23.087711Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:23.164698Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:23.164734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:23.164745Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:23.164847Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:23.230827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:23.606867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:23.616828Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.674901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.862840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.961937Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:23.970723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.387203Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708021411646521:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.387293Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.387717Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708021411646531:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.387768Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.463017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.498716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.534191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.574837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.613539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.658918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.716646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.768182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.855411Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708021411647403:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.855520Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.855738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708021411647409:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.855784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708021411647408:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.855824Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.858617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:26.870676Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708021411647412:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:26.966829Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708021411647463:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:27.947296Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708004231775492:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:27.947369Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:28.541849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] Test command err: 2026-01-10T12:21:02.098279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706889431046292:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:02.098333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:21:02.213135Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:21:02.551999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:02.552133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:02.635605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:02.645799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:02.647281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:21:02.670552Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593706889431046150:2081] 1768047662082995 != 1768047662082998 2026-01-10T12:21:02.803410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/001992/r3tmp/yandexdOJEgG.tmp 2026-01-10T12:21:02.803447Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/001992/r3tmp/yandexdOJEgG.tmp 2026-01-10T12:21:02.803616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/001992/r3tmp/yandexdOJEgG.tmp 2026-01-10T12:21:02.803716Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:02.835926Z INFO: TTestServer started on Port 21183 GrpcPort 6195 2026-01-10T12:21:02.861896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:6195 2026-01-10T12:21:03.072203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:03.107973Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:03.172658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:03.370385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-10T12:21:05.623863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706902315949068:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.623972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706902315949087:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.624110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.626831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706902315949094:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.627036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:21:05.629895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:21:05.660323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706902315949091:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:21:05.894558Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706902315949158:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:21:05.931459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:05.995624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:06.172486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:06.256986Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706902315949166:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:21:06.257993Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzVhM2JkZGQtNGFhYWU4NzAtNGExMjQ5MS05OWFjYjA2, ActorId: [1:7593706902315949054:2330], ActorState: ExecuteState, LegacyTraceId: 01kekxmrembgmbpjghp973rvm8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:21:06.260377Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593706906610916747:2823] 2026-01-10T12:21:07.100264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706889431046292:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:21:07.101177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-10T12:21:11.578730Z :Sinks_Oltp_WriteToTopicAndTable_1_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:11.597602Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:11.616620Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706928085753413:2914] connected; active server actors: 1 2026-01-10T12:21:11.617181Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-10T12:21:11.624157Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:11.624309Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:11.630045Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:11.630881Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:11.631053Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-10T12:21:11.631093Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:21:11.631141Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 720751862240 ... 8003132351675:3296] client test-consumer disconnected session test-consumer_14_1_6429467553187363223_v1 2026-01-10T12:25:29.626512Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-10T12:25:29.626557Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:25:29.626608Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:25:29.627035Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:25:29.627089Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:25:29.627723Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0 grpc read done: success: 0 data: 2026-01-10T12:25:29.627752Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0 grpc read failed 2026-01-10T12:25:29.627778Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0 grpc closed 2026-01-10T12:25:29.627790Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|50e9f970-cab6c88b-3a502946-f3d62e4_0 is DEAD 2026-01-10T12:25:29.628285Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-01-10T12:25:29.628327Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:25:29.628365Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:25:29.628604Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:25:29.628654Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:25:29.628784Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7593708003132351470:3269] destroyed 2026-01-10T12:25:29.628830Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:25:29.628879Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:25:29.628860Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.628885Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.628922Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:25:29.628902Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.628922Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.628937Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.628972Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:25:29.628982Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7593708003132351473:3269] destroyed 2026-01-10T12:25:29.629003Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2026-01-10T12:25:29.629014Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0 grpc read done: success: 0 data: 2026-01-10T12:25:29.629050Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:25:29.629032Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0 grpc read failed 2026-01-10T12:25:29.629065Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0 2026-01-10T12:25:29.629083Z node 14 :PQ_WRITE_PROXY INFO: 
write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|56d0cf46-8aaa0af-7e99dab2-ae74bb57_0 is DEAD 2026-01-10T12:25:29.629385Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:25:29.629423Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:25:29.629507Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593708003132351425:3261] destroyed 2026-01-10T12:25:29.629529Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-10T12:25:29.629540Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.629549Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.629558Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.629568Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.629575Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.629594Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7593708003132351428:3261] destroyed 2026-01-10T12:25:29.709927Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.709971Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.709989Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.710010Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.710029Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.714561Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.714598Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.714614Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.714659Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.714674Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.719146Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.719177Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.719189Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.719204Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.719214Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.810353Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.810411Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.810434Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.810460Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.810484Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.814926Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.814973Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.814994Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.815018Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.815036Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-10T12:25:29.819533Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:25:29.819581Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.819602Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:25:29.819625Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:25:29.819644Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] |94.7%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:03.443672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:03.545582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:03.554063Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:03.554534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:03.554630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:04.013789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:04.147059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:04.147205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.183234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:04.301878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:05.062882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:05.063918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:05.063965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:05.064009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:05.064662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:05.120548Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:05.721041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.332670Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.342671Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:09.347083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:09.422745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:09.422879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.472922Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:09.475773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.644514Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.645162Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.646065Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.646948Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.647078Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.647410Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.647574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.647680Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.647812Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.827906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:09.850324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:09.850448Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.866710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:10.039295Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:10.087828Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:10.087929Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:10.128354Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:10.130021Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:10.130230Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:10.130287Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:10.130340Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:10.130388Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:10.130438Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:10.130489Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:10.131659Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:10.251579Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.251751Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2208:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.268924Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2240:2634] 2026-01-10T12:24:10.269237Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2240:2634], schemeshard id = 72075186224037897 2026-01-10T12:24:10.477086Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2413:2743] 2026-01-10T12:24:10.477593Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:10.480127Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Describe result: PathErrorUnknown 2026-01-10T12:24:10.480184Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Creating table 2026-01-10T12:24:10.480248Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:10.482917Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2432:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.486517Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.493939Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.494035Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.511305Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:10.775922Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:11.009725Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:11.144951Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:11.145042Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Column diff is empty, finishing 2026-01-10T12:24:11.817898Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 04Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4161:3558], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:43.169654Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:24:43.169874Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:24:43.169940Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4145:3553], StatRequests.size() = 1 2026-01-10T12:24:43.170031Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... unblocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR 2026-01-10T12:24:43.291635Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:24:43.291726Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:24:43.400681Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4155:3557], schemeshard count = 1 ... 
waiting for stats update from SchemeShard 2026-01-10T12:24:46.421502Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:48.383347Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:48.383551Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-10T12:24:48.384059Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:24:48.384311Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:24:48.448380Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:24:48.448457Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:48.448695Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:24:48.461865Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-10T12:24:51.366007Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:53.311526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:53.312190Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2026-01-10T12:24:53.312599Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... waiting for TEvPropagateStatistics (done) 2026-01-10T12:24:53.313050Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-10T12:24:53.318912Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:104: [72075186224037894] EvServerDisconnected, pipe server id = [2:2240:2634] 2026-01-10T12:24:53.399176Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:53.413144Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:53.413239Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:4497:3683], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:53.417234Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4505:3690] 2026-01-10T12:24:53.417894Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4505:3690], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... 
waiting for stats update from SchemeShard 2026-01-10T12:24:56.695983Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:24:58.798044Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:24:58.798605Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 9 2026-01-10T12:24:58.798964Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:24:58.799051Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-10T12:25:01.985219Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:03.988890Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:03.989386Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 10 2026-01-10T12:25:03.989769Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:03.989855Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-10T12:25:07.115732Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:09.133522Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:09.133714Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-10T12:25:09.134278Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:09.134498Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-10T12:25:09.145369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:09.145472Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:12.577726Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:25:12.577824Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:12.577882Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:12.577932Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:25:12.820187Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:15.915448Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:15.916382Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-10T12:25:15.916893Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 
2026-01-10T12:25:15.917071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-10T12:25:20.540123Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:22.678672Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:22.679168Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 13 2026-01-10T12:25:22.679509Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:22.679605Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-10T12:25:25.843172Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:28.025240Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:28.025757Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 14 2026-01-10T12:25:28.026083Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:28.026207Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-10T12:25:28.069680Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:25:28.069749Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:28.069909Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:25:28.082900Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-10T12:25:31.429549Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:33.564562Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-10T12:25:33.564774Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 15 ... 
waiting for TEvPropagateStatistics (done) 2026-01-10T12:25:33.565311Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5530:4059]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:33.565691Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:25:33.565748Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5530:4059], StatRequests.size() = 1 2026-01-10T12:25:33.565833Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:33.566169Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 15 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> Yq_1::CreateQuery_With_Idempotency >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::Basic_Null |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> Yq_1::ModifyConnections >> Yq_1::Basic >> Yq_1::DeleteConnections >> Yq_1::DescribeJob >> PrivateApi::PingTask >> Yq_1::ListConnections |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TReplicaTest::Unsubscribe >> TReplicaTest::UpdateWithoutHandshake |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> 
TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> Yq_1::DescribeConnection >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::UpdateWithStaleGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f0e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.q7purc6s.txt 2026-01-10T12:25:22.979619Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:25:22.979567Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:25:22.744779Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |94.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:13.954098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:14.114614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:14.133702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:14.134004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:14.134148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:14.873552Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:15.015410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:15.015559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:15.080704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:15.222890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:16.754070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:16.755353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:16.755439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:16.755476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:16.755833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:16.839081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:17.451941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:20.827154Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:20.835216Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:20.839082Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:20.870395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:20.870518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:20.911030Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:20.913185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.097271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:21.097396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:21.099208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.099894Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.100751Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.101479Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.101787Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.101913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.102070Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.102279Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.102437Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:21.118504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:21.344810Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:21.384993Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:21.385123Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:21.429844Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:21.430850Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:21.431102Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:21.431171Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:21.431228Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:21.431293Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:21.431361Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:21.431446Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:21.432070Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:21.446094Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.446214Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:21.481061Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2198:2643] 2026-01-10T12:21:21.482740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2198:2643], schemeshard id = 72075186224037897 2026-01-10T12:21:21.553372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2254:2669] 2026-01-10T12:21:21.556333Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:21.571812Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Describe result: PathErrorUnknown 2026-01-10T12:21:21.571890Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Creating table 2026-01-10T12:21:21.571986Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:21.581258Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:21.587573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:21.597992Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:21.598135Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:21.613164Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:21.822443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:21.849561Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:22.245871Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:22.332819Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:22.332926Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2260:2673] Owner: [2:2259:2672]. Column diff is empty, finishing 2026-01-10T12:21:22.963487Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... :33.724968Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4311:2461], ActorId: [2:4560:4033], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:23:33.725049Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4311:2461], ActorId: [2:4560:4033], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWIwNDUxNDgtYmQyODExNS1lZDdkMGM3NC05NjgwOGM3Yg==, TxId: 2026-01-10T12:23:33.725926Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4677:4139], ActorId: [2:4678:4140], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Y2RlMGUwOGItMTVhMmM3NGMtMWM3MzM5MGItZTE2ZjY0ZTI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:23:33.796683Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4691:4152]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:33.797050Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:23:33.797101Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4691:4152], StatRequests.size() = 1 2026-01-10T12:23:33.957679Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4677:4139], ActorId: [2:4678:4140], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2RlMGUwOGItMTVhMmM3NGMtMWM3MzM5MGItZTE2ZjY0ZTI=, TxId: 2026-01-10T12:23:33.957780Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4677:4139], ActorId: [2:4678:4140], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2RlMGUwOGItMTVhMmM3NGMtMWM3MzM5MGItZTE2ZjY0ZTI=, TxId: 2026-01-10T12:23:33.958116Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4673:4137], ActorId: 
[2:4677:4139], Got response [2:4678:4140] SUCCESS 2026-01-10T12:23:33.958481Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:34.007653Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:23:34.007740Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3420:3508] 2026-01-10T12:23:34.052770Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:23:34.056419Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:34.059061Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-10T12:23:34.059160Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], Start read next stream part 2026-01-10T12:23:34.098285Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4735:4175]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:34.098520Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:23:34.098550Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4735:4175], StatRequests.size() = 1 2026-01-10T12:24:42.739801Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxs9db2aq3r6s3n7zvdsdy", SessionId: ydb://session/3?node_id=2&id=M2EyY2NmZjAtMjFkMTQyNjgtOTFhODMxNzMtZDRjYjQ5NjM=, Slow query, duration: 68.677376s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-10T12:24:42.741029Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:24:42.741224Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], Start read next stream part 2026-01-10T12:24:42.741685Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-01-10T12:24:42.741806Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:24:42.742147Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:24:42.742210Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4718:4165], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjdhNzAwODMtNDA2MTQwMjMtNjBmNmYzM2EtOTRkYWJhYjU=, TxId: 2026-01-10T12:24:42.746905Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-10T12:24:42.747053Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], Start read next stream part 2026-01-10T12:24:42.844174Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4849:4277]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:42.844559Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-10T12:24:42.844615Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4849:4277], StatRequests.size() = 1 2026-01-10T12:25:35.857740Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxvcftf8c93a9z7h101942", SessionId: ydb://session/3?node_id=2&id=NWU4NTlhOC1kNzQ3NTQ1MC1kYzkxYTQxYi02ZjMyNmIyOQ==, Slow query, duration: 53.106183s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-10T12:25:35.859955Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:25:35.860064Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], Start read next stream part 2026-01-10T12:25:35.860657Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-01-10T12:25:35.860832Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4938:4360], ActorId: [2:4939:4361], Starting query actor #1 [2:4940:4362] 2026-01-10T12:25:35.860877Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4939:4361], ActorId: [2:4940:4362], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:25:35.863192Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:25:35.863248Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4708:2461], ActorId: [2:4831:4262], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTg0ZWZlMGItZjZiYTEzZDQtMmY3YWRiYTYtNTc3OWI3NzI=, TxId: 2026-01-10T12:25:35.864583Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4939:4361], ActorId: [2:4940:4362], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MmNiNDljZjEtNWU5YzFmMmQtMTc1NWJlOS1hMGI0OGJiZg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:25:35.905250Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4939:4361], ActorId: [2:4940:4362], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmNiNDljZjEtNWU5YzFmMmQtMTc1NWJlOS1hMGI0OGJiZg==, TxId: 2026-01-10T12:25:35.905317Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4939:4361], ActorId: [2:4940:4362], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmNiNDljZjEtNWU5YzFmMmQtMTc1NWJlOS1hMGI0OGJiZg==, TxId: 2026-01-10T12:25:35.905709Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4938:4360], ActorId: [2:4939:4361], Got response [2:4940:4362] SUCCESS 2026-01-10T12:25:35.906165Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:25:35.944148Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-10T12:25:35.944221Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3420:3508] |94.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2026-01-10T12:25:38.282568Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:25:38.288263Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:38.288388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-10T12:25:38.288432Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-10T12:25:38.288550Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.288671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-10T12:25:38.288726Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.288882Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:38.288918Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:38.295455Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:38.306109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:9:2056] 2026-01-10T12:25:38.306231Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:9:2056], path# path 2026-01-10T12:25:38.306327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-10T12:25:38.306378Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:25:38.306414Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:38.571365Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2026-01-10T12:25:38.284586Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:38.284674Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:806: [1:7:2054] 
Reject update from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-10T12:25:38.284781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-10T12:25:38.285351Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-10T12:25:38.287735Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.288448Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-10T12:25:38.289346Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-10T12:25:38.289428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-10T12:25:38.289459Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:38.289551Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.289620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-10T12:25:38.289666Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:38.564453Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:25:38.564530Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:38.564712Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:38.564759Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:814: [2:7:2054] Reject update from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-10T12:25:38.564867Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-10T12:25:38.564910Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-10T12:25:38.564983Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.565137Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-10T12:25:38.565195Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-10T12:25:38.565273Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-10T12:25:38.565325Z node 2 :SCHEME_BOARD_REPLICA INFO: 
replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:38.565396Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:25:38.565499Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2026-01-10T12:25:38.565550Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::Update >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2026-01-10T12:25:40.042973Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:25:40.043060Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:40.043286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.043338Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:40.052169Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.052390Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-10T12:25:40.052512Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:40.052660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-10T12:25:40.052711Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-10T12:25:40.052775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-10T12:25:40.052836Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], 
domainOwnerId# 0, capabilities# 2026-01-10T12:25:40.052948Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-10T12:25:40.052995Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.344343Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:25:40.344431Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:40.344567Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.344618Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:40.344691Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.344783Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2026-01-10T12:25:39.991100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-10T12:25:39.991172Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-10T12:25:39.991241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:25:39.991279Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:772: [1:7:2054] Reject handshake from stale populator: sender# [1:8:2055], owner# 1, generation# 1, pending generation# 2 2026-01-10T12:25:40.268895Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-10T12:25:40.268957Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:40.269072Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-10T12:25:40.269120Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.269283Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:25:40.269477Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.269520Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:40.277778Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.278009Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-10T12:25:40.278049Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-10T12:25:40.278100Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.278207Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.278241Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:40.278278Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.278352Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.278414Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-10T12:25:40.278528Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.278648Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-10T12:25:40.278717Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-10T12:25:40.580854Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:25:40.580919Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:40.581059Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.581105Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# 
false 2026-01-10T12:25:40.581182Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.581264Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.581315Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-10T12:25:40.581357Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.581411Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:40.581485Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-10T12:25:40.581513Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2026-01-10T12:25:40.581550Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2026-01-10T12:25:40.581625Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.581663Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-10T12:25:40.581698Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-10T12:25:40.581770Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:40.581809Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-10T12:25:40.581838Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table >> TReplicaTest::Delete [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> 
ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2026-01-10T12:25:41.735092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:25:41.735188Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:978: [1:7:2054] Reject commit from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-10T12:25:41.735251Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-10T12:25:41.735301Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:42.041128Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-10T12:25:42.041177Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 0 2026-01-10T12:25:42.041247Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-10T12:25:42.041274Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:42.041336Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-10T12:25:42.041360Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:7:2054] Commit generation: owner# 1, generation# 1 2026-01-10T12:25:42.041420Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-10T12:25:42.041445Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:988: [2:7:2054] Reject commit from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-10T12:25:42.041478Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:8:2055] 2026-01-10T12:25:42.041498Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-10T12:25:42.128049Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-10T12:25:42.128366Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-10T12:25:42.128564Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-10T12:25:42.128603Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2026-01-10T12:25:42.135140Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# 
[OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-10T12:25:42.135303Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-10T12:25:42.135384Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:42.135501Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-10T12:25:42.135562Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:25:42.135665Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-10T12:25:42.135697Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2026-01-10T12:25:42.135740Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2026-01-10T12:25:42.135844Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2026-01-10T12:25:42.135877Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# 2026-01-10T12:25:42.135961Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:12:2059] 2026-01-10T12:25:42.135993Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:12:2059], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-10T12:25:42.136103Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:13:2060] 2026-01-10T12:25:42.136166Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:13:2060], path# path, domainOwnerId# 0, capabilities# |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] 
|94.8%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |94.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink |94.8%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning >> TPQCDTest::TestPrioritizeLocalDatacenter >> TPQCDTest::TestUnavailableWithoutNetClassifier |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] >> KqpBatchDelete::Large_3 [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable >> KqpResultSetFormats::ValueFormat_Simple >> TSchemeShardTest::Boot >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::MkRmDir >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::InitRootAgain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:05.845733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:05.963245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:05.973576Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:05.973935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:05.974095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:06.425741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:06.556585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:06.556717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.594148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:06.709088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:07.448445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:07.449000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:07.449055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:07.449100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:07.449759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:07.516641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:08.083334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:14.397292Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:14.397863Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:14.402533Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:14.412903Z node 3 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-10T12:24:14.416433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:14.417271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:14.491110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:14.491289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:14.492387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:14.492471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:14.544994Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:14.545270Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:24:14.548650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:14.549121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:14.755644Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:14.755737Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:14.756724Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:14.756789Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:14.770867Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:14.771698Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:14.771986Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:14.853453Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.854279Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.855126Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.855756Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.856176Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.856320Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.856589Z node 3 :HIVE WARN: 
tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.856777Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:14.856930Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:15.060967Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:15.083946Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:15.084071Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:15.127489Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:15.128807Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:15.129068Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:15.129134Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:15.129191Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:15.129248Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:15.129302Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:15.129372Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:15.130564Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:15.132939Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2369:2411] 2026-01-10T12:24:15.144943Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:15.174679Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2508:2448] Owner: [2:2507:2447]. Describe result: PathErrorUnknown 2026-01-10T12:24:15.174748Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2508:2448] Owner: [2:2507:2447]. Creating table 2026-01-10T12:24:15.174852Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2508:2448] Owner: [2:2507:2447]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:15.181474Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:15.181599Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2572:2457], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:15.214774Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2622:2492] 2026-01-10T12:24:15.216611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2622:2492], schemeshard id = 72075186224037897 2026-01-10T12:24:15.239541Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2650:2500], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:15.289203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:15.336727Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2508:2448] Owner: [2:2507:2447]. TEvProposeTransacti ... = 16, ReplyToActorId = [3:5882:3267], StatRequests.size() = 1 2026-01-10T12:25:25.560737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:25.560838Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:25.641414Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-10T12:25:25.641621Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2026-01-10T12:25:25.641896Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:25:25.642039Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:25.642186Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-10T12:25:25.653489Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 17 ], ReplyToActorId[ [3:5915:3279]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:25.653778Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 17 ] 2026-01-10T12:25:25.653828Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 17, ReplyToActorId = [3:5915:3279], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-10T12:25:26.856419Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [3:5948:3289]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:26.856732Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2026-01-10T12:25:26.856783Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, 
ReplyToActorId = [3:5948:3289], StatRequests.size() = 1 2026-01-10T12:25:27.985214Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [3:5979:3299]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:27.985495Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-10T12:25:27.985541Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [3:5979:3299], StatRequests.size() = 1 2026-01-10T12:25:29.172059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:29.197113Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [3:6016:3310]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:29.197428Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-10T12:25:29.197475Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [3:6016:3310], StatRequests.size() = 1 2026-01-10T12:25:30.424427Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [3:6054:3320]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:30.424742Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-10T12:25:30.424785Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:6054:3320], StatRequests.size() = 1 2026-01-10T12:25:31.641590Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-10T12:25:31.641920Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2026-01-10T12:25:31.642449Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:31.642529Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-10T12:25:31.642658Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:25:31.664275Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:6092:3334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:31.664524Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-10T12:25:31.664556Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:6092:3334], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-10T12:25:32.889836Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:6127:3346]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:32.890077Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-10T12:25:32.890125Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:6127:3346], StatRequests.size() = 1 2026-01-10T12:25:33.994751Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:6158:3356]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:33.995051Z 
node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-10T12:25:33.995101Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:6158:3356], StatRequests.size() = 1 2026-01-10T12:25:35.254814Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:35.266247Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:6189:3366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:35.266575Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-10T12:25:35.266619Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:6189:3366], StatRequests.size() = 1 2026-01-10T12:25:36.442917Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:6222:3376]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:36.443192Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-10T12:25:36.443226Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:6222:3376], StatRequests.size() = 1 2026-01-10T12:25:37.803751Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-10T12:25:37.804262Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2026-01-10T12:25:37.804823Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:37.805054Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:37.805173Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:25:37.816506Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6259:3388]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:37.816851Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-10T12:25:37.816898Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6259:3388], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-10T12:25:39.145149Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6298:3402]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:39.145385Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-10T12:25:39.145419Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6298:3402], StatRequests.size() = 1 2026-01-10T12:25:40.390631Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6331:3414]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:40.390962Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-10T12:25:40.391016Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6331:3414], StatRequests.size() = 1 
2026-01-10T12:25:41.653909Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:41.665238Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6368:3425]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:41.665580Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-10T12:25:41.665624Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6368:3425], StatRequests.size() = 1 2026-01-10T12:25:42.980981Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6402:3435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:42.981365Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-10T12:25:42.981418Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6402:3435], StatRequests.size() = 1 2026-01-10T12:25:44.161203Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:25:44.161311Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:44.161602Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-10T12:25:44.183517Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:25:44.277802Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-10T12:25:44.278222Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2026-01-10T12:25:44.278651Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-10T12:25:44.278806Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-10T12:25:44.278903Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:25:44.293359Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6436:3445]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:44.293676Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-10T12:25:44.293726Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6436:3445], StatRequests.size() = 1 row count: 7 (expected: 7) |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 12435, MsgBus: 15606 2026-01-10T12:25:09.841347Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707950374008715:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.841401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.123647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.130955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.131042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.206705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.221542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.256223Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707950374008679:2081] 1768047909833922 != 1768047909833925 2026-01-10T12:25:10.406192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.429853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.429884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.429890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.429939Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.851248Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.058367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.067339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:11.110905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.287802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.446110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.517552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.252950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967553879741:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.253078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.253547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967553879751:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.253605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.701161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.739729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.770481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.799887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.835379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.882589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.943819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.993922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.117453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971848847919:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.117558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971848847924:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.117558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.117720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707971848847926:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.117760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.121583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.131466Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707971848847927:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.203960Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707971848847979:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.844236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707950374008715:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.845398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:15.999257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... ache_undelivered_message; 2026-01-10T12:25:28.613486Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:28.711858Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:28.714218Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708029722479478:2081] 1768047928592076 != 1768047928592079 2026-01-10T12:25:28.742371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:28.742474Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:28.744151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:28.755666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:28.800658Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:28.800683Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:28.800691Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:28.800762Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:29.044183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:29.217252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:29.237727Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:29.351562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:29.479580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:29.545231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:29.610773Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:31.616027Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708042607383233:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.616159Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.616387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708042607383242:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.616463Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.678997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.703448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.732475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.754946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.780425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.807631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.835690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.877593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:31.944594Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708042607384110:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.944802Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708042607384115:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.944877Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.945125Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708042607384118:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.945163Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:31.948507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:31.963183Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708042607384117:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:32.051805Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708046902351466:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:33.513962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:33.594902Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708029722479592:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:33.594967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:43.693033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:43.693068Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CreateAlterTableWithCacheMode >> KqpBatchUpdate::Large_2 [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 7016, MsgBus: 27599 2026-01-10T12:25:22.857450Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708004921389523:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:22.858075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:23.092327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:23.094166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:23.094284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:23.099706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:23.209863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:23.212448Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708004921389482:2081] 1768047922855851 != 1768047922855854 2026-01-10T12:25:23.274087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:23.274109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:23.274119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:23.274216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:23.274306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:23.691352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:23.748741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.866068Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:23.935345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.093285Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:24.162696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.171430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022101260540:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.171548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.171865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022101260550:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.171949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.526330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.559973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.589515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.631110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.664286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.713008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.751709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.802885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.878912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022101261421:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.878984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.879248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022101261427:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.879276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022101261426:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.879283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.883239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:26.894457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708022101261430:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:26.990700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708022101261481:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:27.857626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708004921389523:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:27.857692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:28.389550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... 067166325716:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:36.233585Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:36.247125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:36.323817Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:36.325296Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708067166325687:2081] 1768047936232497 != 1768047936232500 2026-01-10T12:25:36.360964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:36.361061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:36.365827Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:36.403495Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:36.403528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:36.403535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:36.403606Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:36.525484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:36.816586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:36.823848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:36.833267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:36.900283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:37.081776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:37.156448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:37.335412Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.812243Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708080051229443:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.812350Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.812831Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708080051229453:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.812900Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.894947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:39.938049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:39.981718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.022813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.067371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.115502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.157699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.226123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.329873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708084346197625:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.329998Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.330374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708084346197630:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.330430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708084346197631:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.330475Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.335002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:40.354454Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708084346197634:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:40.411509Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708084346197687:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:41.233668Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708067166325716:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:41.233763Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:42.324502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2026-01-10T12:25:45.039672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708105666443003:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:45.039732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:45.341992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:45.383111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:45.383266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:45.441191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:45.447707Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708105666442977:2081] 1768047945037305 != 1768047945037308 2026-01-10T12:25:45.458574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:45.634073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:45.672673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002118/r3tmp/yandexKlI5Fk.tmp 2026-01-10T12:25:45.672766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/d9xi/002118/r3tmp/yandexKlI5Fk.tmp 2026-01-10T12:25:45.674469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002118/r3tmp/yandexKlI5Fk.tmp 2026-01-10T12:25:45.674600Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:29348 2026-01-10T12:25:46.064372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:46.067892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:46.087625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:46.102198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:25:48.128125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118551345834:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.130832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118551345810:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.131080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.131742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118551345860:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.131917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.134607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:48.148595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708118551345837:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:25:48.299923Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708118551345904:2588] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:48.542734Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708118551345912:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:48.548722Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjgwMDZlMzYtNGE1MTUyZi1lMzhmNzJlZi04ZjVhMzc3Yg==, ActorId: [1:7593708118551345796:2324], ActorState: ExecuteState, LegacyTraceId: 01kekxxcahdmm9xzpcfqytfamh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:48.555633Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:48.590031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.734174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.841484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:25:50.040206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708105666443003:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:50.040298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TM] {BAZEL_UPLOAD} 
ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2026-01-10T12:25:45.032595Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708105731781846:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:45.032731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:45.337654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:45.368778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:45.368957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:45.411091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:45.489579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:45.594851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:45.672681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002115/r3tmp/yandexsZJuK9.tmp 2026-01-10T12:25:45.672720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002115/r3tmp/yandexsZJuK9.tmp 2026-01-10T12:25:45.674468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002115/r3tmp/yandexsZJuK9.tmp 2026-01-10T12:25:45.674580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:46.046651Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:7704 2026-01-10T12:25:46.081170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:46.092641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:46.106273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at 
schemeshard: 72057594046644480 2026-01-10T12:25:48.327826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118616684640:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.327910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118616684662:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.327993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.329536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708118616684669:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.329652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.333140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:48.345406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708118616684668:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:25:48.647410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.649822Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708118616684735:2590] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:48.757345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.865208Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708118616684789:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:48.866492Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzUyNDNlYzYtNzUwNmRkOTUtZWQzMWQ2M2ItODdhMDc3MGE=, ActorId: [1:7593708118616684636:2323], ActorState: ExecuteState, LegacyTraceId: 01kekxxch57sp8cbc9pc80h632, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:48.870567Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:48.873812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:25:50.032782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708105731781846:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:50.032861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardTest::CreateAlterTableWithCacheMode [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> 
TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2026-01-10T12:25:45.033123Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708102788993681:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:45.033210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:45.374976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:45.396721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:45.396822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:45.433393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:45.469787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:45.472275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708102788993628:2081] 1768047945017715 != 1768047945017718 2026-01-10T12:25:45.646061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:45.672691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:45.672723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:45.672733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:45.672806Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:46.041185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:19527 2026-01-10T12:25:46.080984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:46.130521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-10T12:25:48.035713Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708115673896478:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.035853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708115673896491:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.035921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.036702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708115673896494:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.036769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.043174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:48.058250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708115673896493:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:25:48.178048Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708115673896562:2593] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:48.542614Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708115673896577:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:48.548619Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDczZjQwYjctYTAwYWVmY2UtZmU4YWExMDgtMTAwMThhMGY=, ActorId: [1:7593708115673896476:2324], ActorState: ExecuteState, LegacyTraceId: 01kekxxc7fe34833pfthy9txek, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:48.552848Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:48.590031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.716485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.823430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:25:50.033278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708102788993681:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:50.033354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TM] {BAZEL_UPLOAD} 
ydb/services/persqueue_cluster_discovery/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] >> TPQCDTest::TestDiscoverClusters [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2026-01-10T12:25:45.041299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708104003585865:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:45.041372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:45.345604Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:45.430761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:45.430884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:45.432541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:45.476915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:45.481413Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708099708618449:2081] 1768047945017691 != 1768047945017694 2026-01-10T12:25:45.592002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:45.672728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002114/r3tmp/yandexHuNSqa.tmp 2026-01-10T12:25:45.672768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002114/r3tmp/yandexHuNSqa.tmp 2026-01-10T12:25:45.674487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002114/r3tmp/yandexHuNSqa.tmp 2026-01-10T12:25:45.674642Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:46.045486Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:3803 2026-01-10T12:25:46.073301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:46.080713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:46.096049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:25:48.096471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708116888488590:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.096635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708116888488598:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.096695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.100498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708116888488606:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.100590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:48.103951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:48.122013Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708116888488605:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:25:48.480563Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708116888488672:2587] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:48.590363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.647973Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708116888488680:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:48.650239Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzM3MjVmOWYtMjRkYWM0YTAtNmZlZTVlNmItYzUwMzk0YjA=, ActorId: [1:7593708116888488588:2325], ActorState: ExecuteState, LegacyTraceId: 01kekxxc9y1jqryzyzwgpbgebm, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:48.653133Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:48.765808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.877937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:25:50.042467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708104003585865:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:50.042568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: 
Trying to start YDB, gRPC: 27023, MsgBus: 27747 2026-01-10T12:24:55.860863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707888255443228:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.860941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:56.099195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.112009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.112125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.137393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.169771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.173689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707888255443180:2081] 1768047895857223 != 1768047895857226 2026-01-10T12:24:56.297212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.303080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.303110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:56.303123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.303218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.666351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:56.867154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.416380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140345955:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.416470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.416727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140345965:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.416780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.519390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.617788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140346061:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.617885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.618142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140346063:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.618208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.634438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.671038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140346141:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.671132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140346146:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.671134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.671281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901140346148:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.671350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.674148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.682502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707901140346149:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:24:58.823000Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707901140346201:2642] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15937, MsgBus: 25933 2026-01-10T12:24:59.905619Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707906155247742:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:59.905666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:59.918294Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:59.989466Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:59.991082Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593707906155247701:2081] 1768047899904633 != 1768047899904636 2026-01-10T12:25:00.017139Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:00.017214Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:00.021224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:00.049993Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:00.050014Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:00.050022Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:00.050094Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:00.119382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:00.396864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:00.912514Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:02.745816Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707919040150477:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.745835Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetc ... s: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:47.678078Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:47.678470Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708112198915509:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:47.678565Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:47.683247Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:47.697974Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7593708112198915508:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:47.784656Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7593708112198915561:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:47.869068Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7593708112198915578:2333], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text"
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-10T12:25:47.869525Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=YjQ1MWZlMWQtNjMwMzEyZGItOWQ5MzkxY2QtNzJlNzAwMmI=, ActorId: [10:7593708112198915492:2323], ActorState: ExecuteState, LegacyTraceId: 01kekxx8c5frc36nfc6bzf1m48, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:48.049724Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7593708095019045465:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:48.049834Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17402, MsgBus: 32005 2026-01-10T12:25:48.922805Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7593708115277143457:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:48.924282Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:48.957507Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:49.120228Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7593708115277143411:2081] 1768047948920810 != 1768047948920813 2026-01-10T12:25:49.132414Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:49.134387Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:49.134495Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:49.138130Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:49.220579Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:49.300907Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:49.300936Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2026-01-10T12:25:49.300947Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:49.301051Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:49.933022Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:50.017704Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:50.024317Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:53.684473Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708136751980777:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:53.684627Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:53.684970Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708136751980791:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:53.685490Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708136751980793:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:53.685561Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:53.691794Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:53.706019Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593708136751980794:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:53.809991Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593708136751980846:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:53.838646Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7593708136751980861:2333], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-10T12:25:53.840636Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NTIzMjZkNi1jZTI5MjUyOC05ZmY3YWM5Zi05ZDMxYTAzZA==, ActorId: [11:7593708136751980761:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxxe6y5p2936z9mcf45n0m, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id#
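The GENERIC_ERROR recorded above is a compile-time failure: a pg-syntax CREATE TABLE declares an int4 column whose DEFAULT expression cannot be converted to an integer, so KiCreateTable rejects the query with "invalid input syntax for type integer". The test's actual DDL is not captured in this log; the snippet below is only a minimal sketch of the kind of statement that produces this message, with the table and column names assumed, not taken from the test.
-- Sketch only: names are assumptions; the failing default is the point.
CREATE TABLE pg_bad_default (
    key   int4 PRIMARY KEY,
    value int4 DEFAULT 'text'   -- 'text' cannot be parsed as an integer, so compilation fails
);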
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-10T12:25:53.922895Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593708115277143457:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:53.922999Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit |94.8%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TScaleRecommenderTest::BasicTest >> THiveTest::TestHiveBalancer >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> THiveTest::TestServerlessMigration >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableById >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3764, MsgBus: 64070 2026-01-10T12:24:55.860992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707887013122303:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.861071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
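The KqpPg::InsertNoTargetColumns_SerialNotNull-useSink output that follows ends with an expected BAD_REQUEST: KiWriteTable reports issue code 2032, "Missing not null column in input: c". The test's actual statements are not shown in this log; the lines below are a hedged sketch of the class of query that trips this check, with table and column names assumed.
-- Sketch only: omitting a NOT NULL column from the INSERT input triggers issue 2032.
CREATE TABLE serial_not_null_demo (
    a int4 PRIMARY KEY,
    b int4,
    c int4 NOT NULL
);
INSERT INTO serial_not_null_demo (a, b) VALUES (1, 10);  -- fails: column "c" is NOT NULL and not initialized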
2026-01-10T12:24:56.134609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.134762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.195493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.233404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.233781Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.234962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707887013122258:2081] 1768047895857493 != 1768047895857496 2026-01-10T12:24:56.306491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.306513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:56.306526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.306686Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.479559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.667182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:56.869081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.550647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707899898025044:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.550661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707899898025032:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.550752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.551020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707899898025048:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.551074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.554257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.563723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707899898025047:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:24:58.700565Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707899898025100:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:58.745847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22865, MsgBus: 9437 2026-01-10T12:24:59.737153Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707908038297283:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:59.737245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:59.749445Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:59.824944Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593707908038297246:2081] 1768047899736166 != 1768047899736169 2026-01-10T12:24:59.840235Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:24:59.840288Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:24:59.846931Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:59.852144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:59.852243Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:59.869966Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:59.869993Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:59.870002Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:59.870109Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:59.878026Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:00.002456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:00.220572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:00.225479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:25:00.742907Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:03.207789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707925218167310:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:03.207789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707925218167318:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:03.207877Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:03.208154Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707925218167325:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:03.208226Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:03.211444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:03.243431Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593707925218167324:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:03.315315Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593707925218167377:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:03.341706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 7205 ... RN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708120186255917:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:49.323932Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:49.324263Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708120186255919:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:49.324343Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:49.336969Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:49.361432Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7593708120186255920:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:49.443874Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7593708120186255972:2542] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:49.482680Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:49.611038Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:49.755687Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7593708120186256209:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-10T12:25:49.756652Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=ZjkzMjU0YzctMmRkOTMyOTgtYzViMDAyYjktOWNlNzRkMjI=, ActorId: [10:7593708120186256207:2355], ActorState: ExecuteState, LegacyTraceId: 01kekxxdwadknraxnv7xqv3stq, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 8100, MsgBus: 22931 2026-01-10T12:25:51.274792Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7593708131269912442:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:51.274908Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:51.301622Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:51.417029Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:51.417149Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:51.430270Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:51.441919Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:25:51.444191Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:25:51.453888Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:51.505387Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:51.505415Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:51.505426Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:51.505552Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:51.520073Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:52.179978Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:52.281851Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:56.134397Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708152744749759:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:56.134565Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:56.134570Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708152744749771:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:56.137449Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708152744749773:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:56.137677Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:56.144670Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:56.174026Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593708152744749774:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:25:56.235019Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593708152744749826:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:56.280219Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593708131269912442:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:56.280333Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:56.289092Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:56.421436Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:56.565285Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7593708152744750064:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-10T12:25:56.565777Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=MTczZDIxYTAtNjczN2JhYTMtMTIwOTNiZDgtYzg3MWY3NzY=, ActorId: [11:7593708152744750062:2355], ActorState: ExecuteState, LegacyTraceId: 01kekxxmg73nwah7f2h1c2mbzj, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:27.696547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:27.818685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:27.827067Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:27.827517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:27.827631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:28.226905Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:28.331446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:28.331582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:28.365634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:28.443708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:29.087414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:29.088177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:29.088213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:29.088236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:29.088658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:29.152758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:29.699492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:32.787701Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:32.796159Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:32.799989Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:32.834838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:21:32.834954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:32.874871Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:32.877751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:33.092449Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.092999Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.093465Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094016Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094104Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094240Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094355Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094418Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.094495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:33.232322Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:33.287543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:33.287669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:33.301063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:33.421199Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:33.453674Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:33.453768Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:33.479500Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:33.480783Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:33.481032Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:33.481085Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:33.481128Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:33.481170Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:33.481218Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:33.481261Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:33.481892Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:33.598726Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:33.598831Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:33.614244Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:21:33.614511Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:21:33.785513Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:21:33.786007Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:33.788720Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:21:33.788786Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:21:33.788882Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:33.792682Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:33.798620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:33.807440Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:33.807578Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:33.824804Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:34.059892Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:34.291803Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:34.414302Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:34.414400Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:21:35.045299Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... EvStatistics::TEvSaveStatisticsQueryResponse, TBasicString> const&, NKikimr::TPathId co... 2026-01-10T12:23:47.714658Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4527:4003], ActorId: [2:4528:4004], Got response [2:4529:4005] SUCCESS 2026-01-10T12:23:47.715112Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:23:47.716404Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3205:3450] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-10T12:23:47.716475Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-10T12:23:47.716528Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-10T12:23:47.752943Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:23:47.753012Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3205:3450] 2026-01-10T12:23:47.753054Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-10T12:23:47.753250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:23:47.753891Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:23:47.757422Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:23:47.757554Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], Start read next stream part 2026-01-10T12:23:47.804562Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4576:4040]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:23:47.804857Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:23:47.804919Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4576:4040], StatRequests.size() = 1 2026-01-10T12:24:55.664059Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxspsd2nh6seyppkc5kp7j", SessionId: ydb://session/3?node_id=2&id=N2NkODRhM2YtNzA1MTg1ZjUtYTVhZjI1Mi05YzZkYThkZg==, Slow query, duration: 67.903082s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:24:55.665135Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:24:55.665347Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], Start read next stream part 2026-01-10T12:24:55.665678Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33190, txId: 18446744073709551615] shutting down 2026-01-10T12:24:55.665763Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:24:55.667969Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:24:55.668026Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4565:4030], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzI5MWEwMWEtNjFkODQ5ODAtNjM2NzZlOS1lYjU5ZDFmYw==, TxId: 2026-01-10T12:24:55.669369Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:24:55.669507Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], Start read next stream part 2026-01-10T12:24:55.727806Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4686:4139]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:55.728023Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-10T12:24:55.728069Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4686:4139], StatRequests.size() = 1 2026-01-10T12:25:50.422599Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxvs3ndt6a8mmz1m132pa9", SessionId: ydb://session/3?node_id=2&id=ZGM3NzQ0NS0xOTU3M2NmLWYwYmUyODcwLTczN2Y2OGU=, Slow query, duration: 54.749942s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:25:50.425221Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:25:50.425388Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], Start read next stream part 2026-01-10T12:25:50.425889Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33190, txId: 18446744073709551615] shutting down 2026-01-10T12:25:50.426321Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:25:50.426382Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4563:2470], ActorId: [2:4668:4124], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=YjJjZGU2M2ItOTdkNWFjOTMtMWFiNGE1ODktZjI4OTljMjE=, TxId: 2026-01-10T12:25:50.426761Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4775:4222], ActorId: [2:4777:4224], Starting query actor #1 [2:4778:4225] 2026-01-10T12:25:50.426837Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4777:4224], ActorId: [2:4778:4225], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:25:50.431136Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4777:4224], ActorId: [2:4778:4225], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzgwZWRhYzAtMmRmNjJiOTAtMjlmZDMwNTEtNjUwZDVkMDg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:25:50.494203Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4777:4224], ActorId: [2:4778:4225], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzgwZWRhYzAtMmRmNjJiOTAtMjlmZDMwNTEtNjUwZDVkMDg=, TxId: 2026-01-10T12:25:50.494306Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4777:4224], ActorId: [2:4778:4225], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzgwZWRhYzAtMmRmNjJiOTAtMjlmZDMwNTEtNjUwZDVkMDg=, TxId: 2026-01-10T12:25:50.494650Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4775:4222], ActorId: [2:4777:4224], Got response [2:4778:4225] SUCCESS 2026-01-10T12:25:50.495362Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:25:50.522032Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:25:50.522114Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3205:3450] 2026-01-10T12:25:50.581001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-10T12:25:50.581098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-10T12:25:50.641498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4304:3799], schemeshard count = 1 2026-01-10T12:25:55.165083Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:55.165337Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-10T12:25:55.166001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-10T12:25:55.255602Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:25:55.255707Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:25:55.256011Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:25:55.270969Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestCreateTablet >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TScaleRecommenderTest::BasicTest [GOOD] >> TScaleRecommenderTest::RollingRestart >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink >> THiveTest::TestServerlessMigration [GOOD] >> THiveTest::TestUpdateChannelValues >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [GOOD] >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> KqpQuery::PreparedQueryInvalidate >> KqpLimits::KqpMkqlMemoryLimitException >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query >> KqpStats::MultiTxStatsFullExpYql >> KqpExplain::ExplainStream >> TScaleRecommenderTest::RollingRestart [GOOD] >> TScaleRecommenderTest::RollingRestartNoLastRecommendation >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestNoMigrationToSelf >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> THiveTest::TestDrain >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] >> 
KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> Yq_1::Basic [GOOD] >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Basic_EmptyList >> Yq_1::Create_And_Modify_The_Same_Connection >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TScaleRecommenderTest::RollingRestartNoLastRecommendation [GOOD] >> TStorageBalanceTest::TestScenario1 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> THiveImplTest::BootQueueSpeed >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> KqpBatchUpdate::Large_3 [GOOD] >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> THiveTest::TestStorageBalancer [GOOD] >> 
THiveTest::TestRestartsWithFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 26663, MsgBus: 18697 2026-01-10T12:25:09.840071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707947567934636:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.840165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.166405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.166509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.191242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.214642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.216033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.216358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707947567934602:2081] 1768047909834230 != 1768047909834233 2026-01-10T12:25:10.414497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.430056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.430085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.430090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.430162Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.851950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.062408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.132698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.289593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.467375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.541424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.147712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707964747805660:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.147861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.149688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707964747805670:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.149772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.734217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.766874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.796849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.824578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.871445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.963226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.008201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.100168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969042773838:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.100278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.100606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969042773844:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.100643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969042773843:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.100670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.103896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.115096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-10T12:25:14.115525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707969042773847:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:14.226351Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707969042773900:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.844235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707947567934636:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.844318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:15.998289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 99536658146131:7762515]; 2026-01-10T12:25:36.041275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:36.053648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:36.117929Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:36.129152Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708062963557309:2081] 1768047936040244 != 1768047936040247 2026-01-10T12:25:36.203367Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:36.203451Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:36.207112Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:36.248432Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:36.248456Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:36.248462Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:36.248529Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:36.268718Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:36.682124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:36.690988Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:36.749223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:36.927992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:37.004668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:37.052769Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.973333Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708075848461069:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.973457Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.974554Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708075848461079:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:39.974630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.065567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.139735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.188579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.237992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.280032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.332993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.381781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.458446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:40.568919Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708080143429249:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.569003Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.569064Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708080143429254:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.569287Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708080143429256:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.569333Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:40.572772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:40.585536Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708080143429257:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:40.648997Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708080143429309:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:41.044191Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708062963557339:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:41.044271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:42.825119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.110990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:51.111020Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestNotEnoughResources >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpScanArrowFormat::AggregateCountStar >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> THiveTest::TestNotEnoughResources 
[GOOD] >> THiveTest::TestRestartTablets >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDownAfterDrain >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveImplTest::BootQueueConfigurePriorities [GOOD] >> THiveTest::TestBlockCreateTablet >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::MultiTxStatsFullYql >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDrainAndReconnect >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert [GOOD] >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> THiveTest::TestDrainAndReconnect [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> PrivateApi::Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2026-01-10T12:25:38.058967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708074286250943:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.059035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:38.656274Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.670670Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.671008Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] E0110 12:25:38.699306054 456797 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.699449852 456797 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.709454Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.767099Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.767289Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.767323Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.771466Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.771623Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.779633Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.815817Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.823907Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.850141Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.850290Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.860382Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.871792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:38.946288Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.956898Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:38.982418Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10570 2026-01-10T12:25:39.040306Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.040389Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.051839Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.051859Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.070351Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.070558Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.071768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.074826Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.093157Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.094583Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.114194Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10570 } ] 2026-01-10T12:25:39.114315Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhos ... 77\377\001`\000h\332\247\200\200\200\200@p\004z\000z\000\240\001\000\270\001\000\300\001\001\312\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2026-01-10T12:26:18.381848Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2026-01-10T12:26:18.381972Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7593708246718297348 RawX2: 4503616807242029 } } DstEndpoint { ActorId { RawX1: 7593708246718297344 RawX2: 4503616807242029 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:18.382059Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:454: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Shards State: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 } 2026-01-10T12:26:18.382166Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:521: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Sending TEvResolveKeySet update for table 'Root/yq/idempotency_keys', range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; (String : yandexcloud://Execute_folder_id, String : idempotency_key)], attempt #1 2026-01-10T12:26:18.382235Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-01-10T12:26:18.382253Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:18.382293Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-10T12:26:18.382308Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:18.382326Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:18.382340Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-01-10T12:26:18.382352Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:18.382367Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Received TEvResolveKeySetResult update for table 'Root/yq/idempotency_keys' 2026-01-10T12:26:18.382428Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Processing resolved ShardId# 72075186224037897, partition range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; ()), i: 0, state ranges: 0, points: 1 2026-01-10T12:26:18.382443Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Add point to new shardId: 72075186224037897 2026-01-10T12:26:18.382542Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Pending shards States: TShardState{ TabletId: 72075186224037897, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:18.382567Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:18.382578Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. BEFORE: 1.0 2026-01-10T12:26:18.382618Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. 
Send EvRead to shardId: 72075186224037897, tablePath: Root/yq/idempotency_keys, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1768047978403), lockTxId = 281474976715738, lockNodeId = 4 2026-01-10T12:26:18.382656Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. AFTER: 0.1 2026-01-10T12:26:18.382667Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:18.383273Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715738 DataShard: 72075186224037897 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 52, BrokenTxLocks= 2026-01-10T12:26:18.383314Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Taken 1 locks 2026-01-10T12:26:18.383331Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:18.383355Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-01-10T12:26:18.383372Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-10T12:26:18.383388Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:18.383423Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:18.383447Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388586 2026-01-10T12:26:18.383468Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. returned 1 rows; processed 1 rows 2026-01-10T12:26:18.383513Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. dropping batch for read #0 2026-01-10T12:26:18.383524Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. 
effective maxinflight 1024 sorted 0 2026-01-10T12:26:18.383547Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:18.383563Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715738, task: 1, CA Id [4:7593708246718297348:2349]. returned async data processed rows 1 left freeSpace 8388586 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:18.383684Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708246718297348:2349], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxy9wa53p3km07v3p6smpk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODk0ZWU4NmItZDYxOGI5MDItYjM4MDVhYWMtMjBjYjJiMzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:18.383879Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715738;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:18.403671Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:26:18.403701Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> Yq_1::Basic_TaggedLiteral [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] >> TSchemeShardTest::PreserveColumnOrder >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2026-01-10T12:25:38.266623Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708073390863956:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.274016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0110 12:25:38.607271783 457040 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.607406184 457040 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.853880Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.020867Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.020981Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.029868Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.084518Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.084603Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.084950Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26134 2026-01-10T12:25:39.193246Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.193356Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.193596Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.193631Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.194732Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.201243Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.265862Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.266113Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.266168Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.294872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.303046Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.303157Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.303776Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.308384Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.311928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.312131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.380298Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.426872Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.448017Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.489055Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.489747Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.495107Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.495592Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26134 } ] 2026-01-10T12:25:39.504209Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26134: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on ... &id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:19.363160Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7593708251700881832:2602], TxId: 281474976710683, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:19.363174Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7593708251700881833:2603], TxId: 281474976710683, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:19.363175Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7593708251700881832:2602], TxId: 281474976710683, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:19.363210Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Received TEvResolveKeySetResult update for table 'Root/yq/nodes' 2026-01-10T12:26:19.363271Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Processing resolved ShardId# 72075186224037893, partition range: [(String : TestTenant, Uint32 : NULL) ; ()), i: 0, state ranges: 1, points: 0 2026-01-10T12:26:19.363336Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:655: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Compare range #0 [(String : TestTenant, Uint32 : NULL) ; (String : TestTenant)] with partition range [(String : TestTenant, Uint32 : NULL) ; ()) : 0 2026-01-10T12:26:19.363379Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:662: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Add range to new shardId: 72075186224037893, range: [(String : TestTenant, Uint32 : NULL) ; (String : TestTenant)] 2026-01-10T12:26:19.363514Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Pending shards States: TShardState{ TabletId: 72075186224037893, Last Key , Ranges: [#0: [(String : TestTenant, Uint32 : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, Uint32 : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:19.363530Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. 
effective maxinflight 1024 sorted 0 2026-01-10T12:26:19.363539Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. BEFORE: 1.0 2026-01-10T12:26:19.363623Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/nodes, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1768047979250), lockTxId = 281474976710683, lockNodeId = 7 2026-01-10T12:26:19.363678Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. AFTER: 0.1 2026-01-10T12:26:19.363695Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:19.365122Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976710683 DataShard: 72075186224037893 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 51, BrokenTxLocks= 2026-01-10T12:26:19.365151Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Taken 1 locks 2026-01-10T12:26:19.365167Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:19.365191Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708251700881832:2602], TxId: 281474976710683, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:19.365211Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708251700881832:2602], TxId: 281474976710683, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:19.365230Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:19.365248Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:19.365295Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388508 2026-01-10T12:26:19.365326Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. 
returned 1 rows; processed 1 rows 2026-01-10T12:26:19.365370Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. dropping batch for read #0 2026-01-10T12:26:19.365392Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:19.365409Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:19.365426Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710683, task: 1, CA Id [7:7593708251700881832:2602]. returned async data processed rows 1 left freeSpace 8388508 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:19.365667Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7593708251700881832:2602], TxId: 281474976710683, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:19.365878Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710683;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:19.366104Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708251700881833:2603], TxId: 281474976710683, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:19.366254Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7593708251700881833:2603], TxId: 281474976710683, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyatt828400p95383nm03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MzBjMmMwMDgtNjk4NTE5ZWUtY2Y4MmNiODMtYTU4Yzg0MTQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:19.366358Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710683;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:19.389930Z node 7 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: CLIENT_CANCELLED
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint [::]:8940 2026-01-10T12:26:19.397426Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:601: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
: Error: GRpc error: (1): CANCELLED } {
: Error: Grpc error response on endpoint localhost:8940 } ], Query: --!syntax_v1 -- Query name: NodesHealthCheck(write) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $node_id as Uint32; DECLARE $instance_id as String; DECLARE $hostname as String; DECLARE $deadline as Timestamp; DECLARE $active_workers as Uint64; DECLARE $memory_limit as Uint64; DECLARE $memory_allocated as Uint64; DECLARE $ic_port as Uint32; DECLARE $node_address as String; DECLARE $data_center as String; UPSERT INTO `nodes` (`tenant`, `node_id`, `instance_id`, `hostname`, `expire_at`, `active_workers`, `memory_limit`, `memory_allocated`, `interconnect_port`, `node_address`, `data_center`) VALUES ($tenant ,$node_id, $instance_id, $hostname, $deadline, $active_workers, $memory_limit, $memory_allocated, $ic_port, $node_address, $data_center); 2026-01-10T12:26:19.398034Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: NodesHealthCheckRequest - NodesHealthCheckResult: {tenant: "TestTenant" node { node_id: 7 instance_id: "d59043b3-43222281-9e5e60b5-41318e59" hostname: "ghrun-tjszgfszoq" node_address: "127.0.1.1" } } ERROR: [ {
: Error: GRpc error: (1): CANCELLED } {
: Error: Grpc error response on endpoint localhost:8940 } ] 2026-01-10T12:26:19.399655Z node 7 :YQL_NODES_MANAGER ERROR: nodes_health_check.cpp:65: Failed with code: INTERNAL_ERROR Details:
: Error: Can't do NodesHealthCheck: (yexception) ydb/core/fq/libs/actors/nodes_health_check.cpp:95:
: Error: GRpc error: (1): CANCELLED
: Error: Grpc error response on endpoint localhost:8940 |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query [GOOD] >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] Test command err: Trying to start YDB, gRPC: 21018, MsgBus: 7185 ... waiting for SysViewsRoster update finished 2026-01-10T12:20:48.970638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:20:49.095959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:20:49.106181Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:20:49.106583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:20:49.106759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:20:49.669731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:49.669842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:49.675633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047645958093 != 1768047645958096 2026-01-10T12:20:49.693405Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:49.739125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:49.873080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:20:50.345313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:20:50.345374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:20:50.345406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:20:50.345891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:50.365506Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:50.858663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:50.938639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:50.938831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:50.938912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:50.939872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:50.940107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:50.944801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:51.020133Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:932:2791], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:20:51.138072Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:51.428348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-10T12:20:51.654628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:20:51.654904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:20:51.655130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:20:51.655255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:20:51.655343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:20:51.655493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:20:51.655617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:20:51.655736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:20:51.655828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:20:51.655939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:20:51.657204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:20:51.657403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:20:51.657526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1115:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:20:51.683432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:20:51.683528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:20:51.683661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:20:51.683709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:20:51.683910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:20:51.683960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:20:51.684117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:20:51.684184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:20:51.684263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:20:51.684301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLA ... 
s_local=44;result=not_found; 2026-01-10T12:26:17.363947Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.364071Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.364149Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.375041Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.375138Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.375180Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.385739Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.385832Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.385871Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.397483Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.397580Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.397626Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.407927Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.408019Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.408059Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.418286Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.418378Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.418422Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.430282Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.430373Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.430417Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.441066Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.441165Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.441207Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.452816Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.452943Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.452999Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.465138Z node 14 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.465247Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.465292Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.476410Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.476506Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.476548Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.487240Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.487366Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.487424Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-10T12:26:17.616654Z node 14 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxxesachmh830v77q6jhpa", SessionId: ydb://session/3?node_id=14&id=NDJhMTYwZmMtNDA4MDQ5OGEtNjBiMmQ5MS1iYzEzZTA0NA==, Slow query, duration: 26.978024s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-10T12:26:17.797238Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:8675:8046], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:50: Error: Bad "DefaultTxMode" setting for "$all" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO] 2026-01-10T12:26:17.801239Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=Y2U5YWEzNzAtZWMwNDcxYzItMTg4MzQ5ZTEtZmQ2ZTNiMDY=, ActorId: [14:8666:8037], ActorState: ExecuteState, LegacyTraceId: 01kekxy99bd06rhfzm3t7c88qe, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 2 column: 50 } message: "Bad \"DefaultTxMode\" setting for \"$all\" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO]" end_position { row: 2 column: 50 } severity: 1 } } tx_id# trace_id# |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2026-01-10T12:25:38.088649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708074842390866:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.088821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0110 12:25:38.487208041 456790 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.487371933 456790 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.491308Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.616341Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.666355Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.684345Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.685134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:38.715680Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.715760Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.723321Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.732266Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.752987Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.759370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.759495Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.771455Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8560 2026-01-10T12:25:38.778949Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.779048Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.779138Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.801590Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.806947Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.812054Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.834558Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.869434Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.889178Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.897241Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.910304Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.925422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:38.939083Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.943396Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.951240Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.967137Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.991208Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8560: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8560 } ] 2026-01-10T12:25:38.991293Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): conn ... db://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:20.250709Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708254117659362 RawX2: 4503616807242572 } } DstEndpoint { ActorId { RawX1: 7593708254117659363 RawX2: 4503616807242573 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708254117659363 RawX2: 4503616807242573 } } DstEndpoint { ActorId { RawX1: 7593708254117659358 RawX2: 4503616807242254 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:20.250734Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:20.250746Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:20.250768Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:20.250785Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:20.250795Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . 
TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:20.250839Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Received TEvResolveKeySetResult update for table 'Root/yq/pending_small' 2026-01-10T12:26:20.250925Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Processing resolved ShardId# 72075186224037896, partition range: [(String : TestTenant, String : NULL, String : NULL) ; ()), i: 0, state ranges: 1, points: 0 2026-01-10T12:26:20.251007Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:655: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Compare range #0 [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] with partition range [(String : TestTenant, String : NULL, String : NULL) ; ()) : 0 2026-01-10T12:26:20.251064Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:662: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Add range to new shardId: 72075186224037896, range: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] 2026-01-10T12:26:20.251200Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Pending shards States: TShardState{ TabletId: 72075186224037896, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:20.251219Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:20.251227Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. BEFORE: 1.0 2026-01-10T12:26:20.251326Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Send EvRead to shardId: 72075186224037896, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-10T12:26:20.251368Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. AFTER: 0.1 2026-01-10T12:26:20.251381Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:20.254606Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. 
Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:20.254633Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Taken 0 locks 2026-01-10T12:26:20.254650Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:20.254677Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708254117659362:2892], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-01-10T12:26:20.254697Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708254117659362:2892], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-10T12:26:20.254715Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:20.254731Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:20.254746Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:20.254760Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. returned 0 rows; processed 0 rows 2026-01-10T12:26:20.254796Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. dropping batch for read #0 2026-01-10T12:26:20.254809Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:20.254822Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:20.254840Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715728, task: 1, CA Id [4:7593708254117659362:2892]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:20.254905Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708254117659362:2892], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:20.255076Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715728;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:20.255282Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:20.255342Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708254117659363:2893], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxybpn8bz6qzfvzjtr9fjm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTc0OWEyNzMtZThhZGJkODktM2IxOTNlMDQtZjRiZWJjMmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:20.255437Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715728;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> KqpParams::RowsList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13837, MsgBus: 28064 2026-01-10T12:24:55.874234Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707889730190722:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.874306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:56.164259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.164347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.189112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.209315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.211884Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.213066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707889730190681:2081] 1768047895872853 != 1768047895872856 2026-01-10T12:24:56.302745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.302796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:56.302808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.302880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.423429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.706218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:56.880344Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.505457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707902615093456:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.505577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.505853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707902615093466:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.505917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.542457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.656185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707902615093590:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.656282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.656472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707902615093596:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.656483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707902615093595:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.656522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.659508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.669026Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707902615093599:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:24:58.792448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707902615093650:2615] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 18993, MsgBus: 15734 2026-01-10T12:25:00.138267Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707908712640385:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:00.138338Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:00.150535Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:00.221851Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:00.232503Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593707908712640348:2081] 1768047900137194 != 1768047900137197 2026-01-10T12:25:00.281260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:00.281362Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:00.285351Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:00.322459Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:00.322487Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:00.322495Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:00.322579Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:00.431616Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:00.728959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:00.734905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:25:01.143250Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:02.971489Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707917302575822:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.971598Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.971897Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707917302575831:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.971950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.992972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:03.037715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707921597543254:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.280401Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.280849Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708224931723637:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.280912Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.309618Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:13.387110Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708224931723735:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.387236Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.387320Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708224931723740:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.387571Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708224931723742:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.387673Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:13.392184Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:13.406341Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593708224931723744:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:26:13.486586Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593708224931723795:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30570, MsgBus: 26016 2026-01-10T12:26:15.075902Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7593708232739331227:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:15.076029Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:15.132213Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:15.252571Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:15.273004Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:15.273156Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:15.303014Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:15.405073Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:15.405108Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:15.405122Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:15.405265Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:15.416411Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:16.085238Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:16.206894Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:20.076262Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708232739331227:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:20.077269Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2026-01-10T12:26:20.909588Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708254214168545:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:20.909740Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:20.910151Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708254214168554:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:20.910249Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.044153Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:21.126766Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708258509135952:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.126899Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.127046Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708258509135957:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.127134Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708258509135959:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.127197Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:21.132472Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:21.146453Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708258509135961:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:26:21.229225Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708258509136012:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> TSchemeShardTest::PreserveColumnOrder [GOOD] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestAsyncReassign ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:47.697765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:47.697873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.697923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:47.697974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:47.698028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:47.698067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:47.698164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.699045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:47.699980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:47.701371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:47.805037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:47.805130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-10T12:25:47.822956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:47.823091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:47.823285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:47.832518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:47.833754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:47.836714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:47.839209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:47.846347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.846577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:47.866926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:47.867244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:47.867302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:47.867602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.052247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.053994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
end, to populator: [16:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:26:23.563377Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [16:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:26:23.564045Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.564224Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:26:23.567591Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:23.567772Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:23.567845Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:26:23.567917Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:26:23.568000Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:26:23.568602Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:23.568693Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:23.568728Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:26:23.568761Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:26:23.568798Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:26:23.568881Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:26:23.569582Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 
DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2066 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:26:23.569626Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:26:23.569764Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2066 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:26:23.569955Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2066 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-10T12:26:23.571233Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:23.571334Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:26:23.571551Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:23.571689Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:26:23.571866Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 68719479457 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:23.571989Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:23.572061Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.572142Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:26:23.572210Z node 16 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:26:23.583874Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:26:23.584289Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:26:23.584420Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.592233Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.592820Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.592909Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:26:23.593169Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:26:23.593234Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:23.593315Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:26:23.593379Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:23.593457Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:26:23.593586Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:703:2693] message: TxId: 102 2026-01-10T12:26:23.593680Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:23.593761Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:26:23.593826Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:26:23.594026Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:26:23.596402Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:26:23.596474Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [16:761:2739] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-10T12:26:23.600453Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" PartitionConfig { ChannelProfileId: 0 } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:26:23.600777Z node 16 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:26:23.601152Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, at schemeshard: 72057594046678944 2026-01-10T12:26:23.604234Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Setting ChannelProfileId to 0 for tables with storage config is not allowed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:26:23.604690Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> KqpParams::ImplicitParameterTypes >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:47.698551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:47.698654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.698707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:47.698750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:47.698796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:47.698829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:47.698925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-10T12:25:47.699197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:47.700268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:47.701360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:47.798337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:47.798419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:47.815708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:47.815834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:47.817582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:47.831459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:47.833756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:47.836700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:47.839207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:47.846462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.846684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:47.868011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:47.868112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.868229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:47.868289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:47.868356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:47.868617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:47.882708Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:25:48.015245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:25:48.016561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.017523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:25:48.017603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:25:48.018646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:25:48.018767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:48.022307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:48.022514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:25:48.022726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.023574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:25:48.023635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:25:48.023674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:25:48.026396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.026461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:25:48.026503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:25:48.028730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.028788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.028834Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:25:48.028881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:25:48.037162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:25:48.041058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:25:48.042770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:25:48.045015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:48.045176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:25:48.045228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:25:48.045624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:25:48.045682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:25:48.046956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:25:48.047081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:25:48.050932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:48.050986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2046 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-10T12:26:24.315763Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2046 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-10T12:26:24.318750Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 68719479431 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:26:24.318841Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:26:24.319063Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 68719479431 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:26:24.319195Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:26:24.319368Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 68719479431 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-10T12:26:24.319494Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:24.319570Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:26:24.319627Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:26:24.319697Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-10T12:26:24.320944Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:26:24.325298Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:26:24.325452Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 
2026-01-10T12:26:24.328867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:26:24.329097Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:26:24.329164Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:26:24.329413Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:26:24.329487Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:26:24.329554Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:26:24.329614Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:26:24.329680Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:26:24.329796Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:735:2713] message: TxId: 101 2026-01-10T12:26:24.329883Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:26:24.329952Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:26:24.330015Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:26:24.330283Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:26:24.333058Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:26:24.333141Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [16:736:2714] TestWaitNotification: OK eventTxId 101 2026-01-10T12:26:24.333859Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:26:24.334342Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 534us result status StatusSuccess 2026-01-10T12:26:24.335353Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "col01" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col02" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col03" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "col04" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "col05" Type: "Utf8" TypeId: 4608 Id: 5 NotNull: false IsBuildInProgress: false } Columns { Name: "col06" Type: "Utf8" TypeId: 4608 Id: 6 NotNull: false IsBuildInProgress: false } Columns { Name: "col07" Type: "Utf8" TypeId: 4608 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "col08" Type: "Utf8" TypeId: 4608 Id: 8 NotNull: false IsBuildInProgress: false } Columns { Name: "col09" Type: "Utf8" TypeId: 4608 Id: 9 NotNull: false IsBuildInProgress: false } Columns { Name: "col10" Type: "Utf8" TypeId: 4608 Id: 10 NotNull: false IsBuildInProgress: false } Columns { Name: "col11" Type: "Utf8" TypeId: 4608 Id: 11 NotNull: false IsBuildInProgress: false } Columns { Name: "col12" Type: "Utf8" TypeId: 4608 Id: 12 NotNull: false IsBuildInProgress: false } Columns { Name: "col13" Type: "Utf8" TypeId: 4608 Id: 13 NotNull: false IsBuildInProgress: false } Columns { Name: "col14" Type: "Utf8" TypeId: 4608 Id: 14 NotNull: false IsBuildInProgress: false } Columns { Name: "col15" Type: "Utf8" TypeId: 4608 Id: 15 NotNull: false IsBuildInProgress: false } Columns { Name: "col16" Type: "Utf8" TypeId: 4608 Id: 16 NotNull: false IsBuildInProgress: false } Columns { Name: "col17" Type: "Utf8" TypeId: 4608 Id: 17 NotNull: false IsBuildInProgress: false } Columns { Name: "col18" Type: "Utf8" TypeId: 4608 Id: 18 NotNull: false IsBuildInProgress: false } Columns { Name: "col19" Type: "Utf8" TypeId: 4608 Id: 19 NotNull: false IsBuildInProgress: false } Columns { Name: "col20" Type: "Utf8" TypeId: 4608 Id: 20 NotNull: false IsBuildInProgress: false } Columns { Name: "col21" Type: "Utf8" TypeId: 4608 Id: 21 NotNull: false IsBuildInProgress: false } Columns { Name: "col22" Type: "Utf8" TypeId: 4608 Id: 22 NotNull: false IsBuildInProgress: false } Columns { Name: "col23" Type: "Utf8" TypeId: 4608 Id: 23 NotNull: false IsBuildInProgress: false } Columns { Name: "col24" Type: "Utf8" TypeId: 4608 Id: 24 NotNull: false IsBuildInProgress: false } Columns { Name: "col25" Type: "Utf8" TypeId: 4608 Id: 25 NotNull: false IsBuildInProgress: false } Columns { Name: "col26" Type: "Utf8" TypeId: 4608 Id: 26 NotNull: false IsBuildInProgress: false } Columns { Name: "col27" Type: "Utf8" TypeId: 4608 Id: 27 NotNull: false IsBuildInProgress: false } Columns { Name: "col28" Type: "Utf8" TypeId: 4608 Id: 28 NotNull: false IsBuildInProgress: false } Columns { Name: "col29" Type: "Utf8" TypeId: 4608 Id: 29 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col01" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpStats::JoinNoStatsYql >> THiveTest::TestAsyncReassign [GOOD] >> THiveTest::TestAlterFollower >> KqpParams::CheckQueryCacheForPreparedQuery >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> KqpExplain::PrecomputeRange ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2026-01-10T12:25:38.221053Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708075725413219:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.224774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:38.586063Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] E0110 12:25:38.768661371 457051 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.768822890 457051 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.821617Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:38.899220Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:38.910511Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:38.977250Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:38.977480Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.025578Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.025756Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.053919Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.054015Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.054095Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.054137Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.054502Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27948 2026-01-10T12:25:39.058640Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.058691Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.092943Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.093004Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.108252Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.120022Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.181545Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.188555Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.188895Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.189624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.190875Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.230617Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.234922Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.235159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.235774Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.236644Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.351281Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27948 } ] 2026-01-10T12:25:39.352198Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27948: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on ... de 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708264801772838 RawX2: 4503616807242644 } } DstEndpoint { ActorId { RawX1: 7593708264801772839 RawX2: 4503616807242645 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708264801772839 RawX2: 4503616807242645 } } DstEndpoint { ActorId { RawX1: 7593708264801772834 RawX2: 4503616807242077 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:23.659805Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7593708264801772838:2964] 2026-01-10T12:26:23.659838Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:23.659856Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:23.659874Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-10T12:26:23.659966Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708264801772838 RawX2: 4503616807242644 } } DstEndpoint { ActorId { RawX1: 7593708264801772839 RawX2: 4503616807242645 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708264801772839 RawX2: 4503616807242645 } } DstEndpoint { ActorId { RawX1: 7593708264801772834 RawX2: 4503616807242077 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:23.659987Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:23.659997Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:23.660010Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:23.660025Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:23.660033Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:23.660463Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. 
Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:23.660483Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. Taken 0 locks 2026-01-10T12:26:23.660496Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:23.660521Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708264801772838:2964], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:23.660540Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708264801772838:2964], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:23.660556Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:23.660577Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:23.660595Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:23.660611Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. returned 0 rows; processed 0 rows 2026-01-10T12:26:23.660645Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. dropping batch for read #0 2026-01-10T12:26:23.660662Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. effective maxinflight 1 sorted 1 2026-01-10T12:26:23.660675Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:23.660690Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715740, task: 1, CA Id [4:7593708264801772838:2964]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:23.660747Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708264801772838:2964], TxId: 281474976715740, task: 1. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:23.660935Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715740;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:23.661100Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:23.661156Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708264801772839:2965], TxId: 281474976715740, task: 2. Ctx: { TraceId : 01kekxyekba5hp1d9e7mfgfspz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MzVlNGQyNjMtNDk4NmM0MmQtOTVmMzJkYzYtNzg2MzkxZmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:23.661251Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715740;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> KqpLimits::WaitCAsStateOnAbort >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink |94.9%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> THiveTest::TestAlterFollower [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> THiveTest::TestBootProgress >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> Yq_1::Basic_EmptyList [GOOD] >> THiveTest::TestFollowers >> THiveTest::TestHiveBalancerWithLimit >> Yq_1::Basic_EmptyDict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:47.699067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:47.699166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.699209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:47.699246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:47.699286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:47.699314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:47.699468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.699566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:47.700501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2026-01-10T12:25:47.701415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:47.807166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:47.807258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:47.830069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:47.830184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:47.830410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:47.840850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:47.841111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:47.841846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:47.842091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:47.844607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.846050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:47.867001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:47.867296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:47.867366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:47.867631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.032126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.034968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.035034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.035096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.035158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.035224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
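The roster-update lines above show SysViewsRosterUpdate proposing one ESchemeOpCreateSysView transaction per built-in system view (auth_permissions, ds_pdisks, nodes, the top_queries_* and top_partitions_* views, and so on), each under its own TxId and the metadata@system owner. A minimal sketch for summarizing such output when reading these logs, assuming the 'CreateSysView { Name: "..." Type: ... } } TxId: ...' layout stays exactly as printed here; the regex and helper name are illustrative and not part of the test itself.

import re

# Minimal sketch: summarize SysViewsRosterUpdate lines like the ones above.
# Assumes the schemeshard debug output keeps the exact
# 'CreateSysView { Name: "<name>" Type: <EType> } } TxId: <id>' layout.
SYSVIEW_RE = re.compile(
    r'CreateSysView\s*\{\s*Name:\s*"(?P<name>[^"]+)"\s*Type:\s*(?P<type>\w+)\s*\}\s*\}\s*'
    r'TxId:\s*(?P<txid>\d+)'
)

def summarize_sysview_roster(log_text: str) -> list[tuple[str, str, int]]:
    """Return (view name, view type, TxId) for every CreateSysView proposal found."""
    return [
        (m.group("name"), m.group("type"), int(m.group("txid")))
        for m in SYSVIEW_RE.finditer(log_text)
    ]

if __name__ == "__main__":
    sample = (
        'CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 '
        'CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668'
    )
    for name, vtype, txid in summarize_sysview_roster(sample):
        print(f"{name:<12} {vtype:<10} TxId={txid}")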
shard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-10T12:26:28.249142Z node 12 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-10T12:26:28.265434Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:26:28.265707Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:26:28.265923Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:26:28.265978Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:26:28.266192Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:26:28.266259Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:28.269407Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:28.269704Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:26:28.270001Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:26:28.270116Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:26:28.270184Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:26:28.270241Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:26:28.272615Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:26:28.272697Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:26:28.272762Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:26:28.274922Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2026-01-10T12:26:28.274986Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:26:28.275061Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:26:28.275141Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:26:28.275353Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:26:28.277217Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:26:28.277403Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:26:28.277836Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:28.278030Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 51539609711 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:26:28.278107Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:26:28.278471Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:26:28.278566Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:26:28.278833Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:26:28.278942Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:26:28.281291Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:26:28.281360Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-10T12:26:28.281715Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:26:28.281783Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:26:28.282184Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:26:28.282235Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:26:28.282371Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:26:28.282410Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:26:28.282458Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:26:28.282499Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:26:28.282544Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:26:28.282597Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:26:28.282653Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:26:28.282688Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:26:28.282759Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:26:28.282800Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:26:28.282842Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:26:28.283609Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:26:28.283761Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:26:28.283827Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:26:28.283884Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:26:28.283948Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:26:28.284092Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:26:28.288407Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:26:28.288850Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:28.292258Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:666:2655] Bootstrap 2026-01-10T12:26:28.294009Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:666:2655] Become StateWork (SchemeCache [12:672:2661]) 2026-01-10T12:26:28.295419Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:666:2655] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:26:28.298385Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> KqpExplain::UpdateSecondaryConditional-UseSink >> THiveTest::TestBootProgress [GOOD] >> THiveTest::TestBridgeCreateTablet >> Yq_1::ModifyQuery [GOOD] >> Yq_1::DescribeQuery [GOOD] |94.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
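The node 12 trace above walks a single ESchemeOpAlterSubDomain operation through the stages the log names explicitly: TCreateParts (state 2 -> 3), TConfigureParts (3 -> 128), TPropose (128 -> 240, once the fake coordinator plans step 5000037), and TDone, with the scheme-board publication acknowledged at the end. A minimal sketch of that progression; the numeric codes and stage names are read off this trace only, not the schemeshard sources, so treat the mapping as an assumption.

# Minimal sketch of the stage progression visible in the AlterSubDomain trace above.
# The numeric codes and stage names are taken only from the log lines
# ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240"), not from the
# schemeshard sources, so the mapping below is an assumption.
OBSERVED_STAGES = {
    2: "TCreateParts",
    3: "TConfigureParts",
    128: "TPropose",
    240: "TDone",
}
EXPECTED_ORDER = [2, 3, 128, 240]

def check_progression(transitions: list[tuple[int, int]]) -> bool:
    """True if the (from, to) transitions walk EXPECTED_ORDER without skipping a stage."""
    path = [transitions[0][0]] + [dst for _, dst in transitions]
    return path == EXPECTED_ORDER

if __name__ == "__main__":
    # Transitions as they appear in the trace for opId 1:0.
    seen = [(2, 3), (3, 128), (128, 240)]
    print("progression ok:", check_progression(seen))
    for code in [src for src, _ in seen] + [seen[-1][1]]:
        print(code, "=>", OBSERVED_STAGES[code])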
results_accumulator.log} >> KqpParams::RowsList [GOOD] >> KqpParams::ParameterTypes >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestReassignNonexistentTablet >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2026-01-10T12:25:38.044788Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708074439482998:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.045377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0110 12:25:38.589743175 456820 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-10T12:25:38.588438Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] E0110 12:25:38.607827360 456820 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.705623Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.759687Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.776570Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.776828Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.779782Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.798422Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.869937Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.870180Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.870217Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.870522Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20950 2026-01-10T12:25:38.871801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:38.906724Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.906819Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.906904Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.906965Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.921133Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.957483Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:38.973062Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.080467Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.100915Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.104648Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.123232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.169586Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.180571Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.191149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.203385Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.218760Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.219874Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.249478Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.249605Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20950: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20950 } ] 2026-01-10T12:25:39.297691Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/comp ... lt. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708288023059641 RawX2: 4503616807242690 } } DstEndpoint { ActorId { RawX1: 7593708288023059642 RawX2: 4503616807242691 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } 2026-01-10T12:26:28.446652Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:28.446664Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:28.446677Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:28.446685Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:28.446697Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.446710Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:28.446726Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:28.446739Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:28.446748Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:28.447854Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Received TEvResolveKeySetResult update for table 'Root/yq/queries' 2026-01-10T12:26:28.447931Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Processing resolved ShardId# 72075186224037900, partition range: [(String : yandexcloud://Execute_folder_id, String : utqudnv107hpb9cfg0qc) ; ()), i: 0, state ranges: 0, points: 1 2026-01-10T12:26:28.447950Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Add point to new shardId: 72075186224037900 2026-01-10T12:26:28.448043Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Pending shards States: TShardState{ TabletId: 72075186224037900, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utqudnv107hpb9cfg0qc)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utqudnv107hpb9cfg0qc)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:28.448057Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:28.448068Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. BEFORE: 1.0 2026-01-10T12:26:28.448127Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Send EvRead to shardId: 72075186224037900, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-10T12:26:28.448159Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. AFTER: 0.1 2026-01-10T12:26:28.448171Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:28.449164Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Recv TEvReadResult from ShardID=72075186224037900, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:28.449186Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. 
Taken 0 locks 2026-01-10T12:26:28.449201Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:28.449224Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:28.449243Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.449259Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:28.449278Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. enter pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:28.449302Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. exit pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 1 freeSpace: 8387491 2026-01-10T12:26:28.449323Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. returned 1 rows; processed 1 rows 2026-01-10T12:26:28.449360Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. dropping batch for read #0 2026-01-10T12:26:28.449373Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:28.449385Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:28.449402Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715741, task: 1, CA Id [4:7593708288023059641:3010]. returned async data processed rows 1 left freeSpace 8387491 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:28.449521Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708288023059641:3010], TxId: 281474976715741, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:28.449707Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715741;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:28.449889Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708288023059642:3011], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.450005Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708288023059642:3011], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyk898tks3kgsba29hbkz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NWQ0NDNiMGYtMzdkYzc1MTgtODNjNDcwLTU5MzRkNDY3. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:28.450118Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715741;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:28.555037Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:19750: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:19750 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2026-01-10T12:25:38.172850Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708072224395651:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.173384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0110 12:25:38.784659732 457024 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.784808341 457024 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.802053Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.802138Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.802182Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.802230Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.832752Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.863255Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.913835Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.931637Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.946208Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.968779Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.968948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.999811Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:38.999885Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.032963Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.033046Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.033210Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.033267Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.033957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.035374Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.041322Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.051056Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23757 2026-01-10T12:25:39.126240Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.126339Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.160982Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.162720Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.208292Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.253557Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.253716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.253778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.259385Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.259471Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23757: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23757 } ] 2026-01-10T12:25:39.259872Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory ... 71646926 2026-01-10T12:26:28.781386Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708289992057923 RawX2: 4503616807242654 } } DstEndpoint { ActorId { RawX1: 7593708289992057924 RawX2: 4503616807242655 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } 2026-01-10T12:26:28.781397Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:28.781409Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:28.781423Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:28.781431Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:28.781446Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.781457Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:28.781472Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. 
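Both Yq_1 test logs above open with a burst of YQ_CONTROL_PLANE_STORAGE warnings: every attempt to create the Root/yq directory and its tables (queries, nodes, tenants, quotas, and the rest) fails with gRPC code 14, "Failed to connect to remote host: Connection refused", until the local endpoint comes up, and the storage keeps retrying. A minimal sketch for tallying those retries per object when triaging such runs, assuming the 'Create table "Root/yq/..."' wording and the timestamp format stay as printed here; the helper is illustrative only.

import re
from collections import Counter

# Minimal sketch: count the bootstrap retries visible in the Yq_1 test logs above.
# Assumes the control-plane-storage warnings keep the exact
# 'Create table "Root/yq/<name>"' / 'Create directory "Root/yq"' wording and that a
# failed attempt mentions "Failed to connect to remote host: Connection refused".
CREATE_RE = re.compile(r'Create (table|directory) "Root/yq(?:/(?P<name>[^"]+))?"')

def count_failed_bootstrap_steps(log_text: str) -> Counter:
    """Tally failed create attempts per object while the gRPC endpoint was down."""
    failures = Counter()
    # Split on timestamps so each record is inspected on its own; the timestamp
    # format ("2026-01-10T12:25:38...") is taken from the log lines above.
    for record in re.split(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z", log_text):
        m = CREATE_RE.search(record)
        if m and "Connection refused" in record:
            failures[m.group("name") or "<yq directory>"] += 1
    return failures

if __name__ == "__main__":
    sample = (
        '2026-01-10T12:25:38.705623Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: '
        'Create table "Root/yq/nodes". Create session error: Failed to connect to '
        'remote host: Connection refused '
        '2026-01-10T12:25:38.759687Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: '
        'Create table "Root/yq/nodes". Create session error: Failed to connect to '
        'remote host: Connection refused'
    )
    print(count_failed_bootstrap_steps(sample))  # Counter({'nodes': 2})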
returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:28.781488Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:28.781498Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:28.781514Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Received TEvResolveKeySetResult update for table 'Root/yq/queries' 2026-01-10T12:26:28.781579Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Processing resolved ShardId# 72075186224037889, partition range: [(String : yandexcloud://some_folder_id, String : utqudnv105a0nosf1s98) ; ()), i: 0, state ranges: 0, points: 1 2026-01-10T12:26:28.781597Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Add point to new shardId: 72075186224037889 2026-01-10T12:26:28.781682Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Pending shards States: TShardState{ TabletId: 72075186224037889, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudnv105a0nosf1s98)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudnv105a0nosf1s98)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:28.781695Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:28.781706Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. BEFORE: 1.0 2026-01-10T12:26:28.781743Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Send EvRead to shardId: 72075186224037889, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-10T12:26:28.781781Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. AFTER: 0.1 2026-01-10T12:26:28.781793Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Scheduled table scans, in flight: 1 shards. 
pending shards to read: 0, 2026-01-10T12:26:28.782510Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Recv TEvReadResult from ShardID=72075186224037889, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:28.782539Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Taken 0 locks 2026-01-10T12:26:28.782554Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:28.782575Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:28.782594Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.782609Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:28.782627Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. enter pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:28.782649Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. exit pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 1 freeSpace: 8386350 2026-01-10T12:26:28.782667Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. returned 1 rows; processed 1 rows 2026-01-10T12:26:28.782709Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. dropping batch for read #0 2026-01-10T12:26:28.782720Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:28.782731Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:28.782746Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710743, task: 1, CA Id [4:7593708289992057923:2974]. returned async data processed rows 1 left freeSpace 8386350 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:28.782864Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708289992057923:2974], TxId: 281474976710743, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. 
RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:28.783060Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710743;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:28.783223Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708289992057924:2975], TxId: 281474976710743, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:28.783341Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708289992057924:2975], TxId: 281474976710743, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxykmxb0vfxv7n90r4n2wz. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmZhMWZmMjItZjliNTJlNjctZjdiOGE4NzktMWVkNzgyZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:28.783434Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710743;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> THiveTest::TestBridgeCreateTablet [GOOD] >> THiveTest::TestBridgeDisconnect >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> THiveTest::TestReassignNonexistentTablet [GOOD] >> THiveTest::TestLockedTabletsMustNotRestart >> KqpStats::MultiTxStatsFullScan [GOOD] >> Yq_1::DeleteQuery [GOOD] >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2026-01-10T12:25:38.382816Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708073516537643:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.382896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:38.769901Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.796018Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] E0110 12:25:38.825713176 456950 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.825891522 456950 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.824615Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.824714Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.825040Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.905621Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.905769Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.905952Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.906023Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.924987Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.925062Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.956508Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25615 2026-01-10T12:25:38.967365Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.969316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:38.970460Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.971420Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.974719Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.976910Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.977843Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.990323Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:38.991175Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.001579Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.005541Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.006060Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.008948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.082190Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.092568Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.118667Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.186544Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10T12:25:39.186608Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25615 } ] 2026-01-10 ... 1.399620Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715760, task: 1, CA Id [4:7593708300094759847:3026]. returned 1 rows; processed 1 rows 2026-01-10T12:26:31.399682Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715760, task: 1, CA Id [4:7593708300094759847:3026]. dropping batch for read #0 2026-01-10T12:26:31.399696Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715760, task: 1, CA Id [4:7593708300094759847:3026]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:31.399717Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715760, task: 1, CA Id [4:7593708300094759847:3026]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:31.399743Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715760, task: 1, CA Id [4:7593708300094759847:3026]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:31.399904Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708300094759847:3026], TxId: 281474976715760, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:31.400209Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715760;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:31.400443Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:31.400567Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-01-10T12:26:31.400717Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: SelfId: [4:7593708300094759851:3027], TxId: 281474976715760, task: 2. Add data: 233 / 233 2026-01-10T12:26:31.400786Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: SelfId: [4:7593708300094759851:3027], TxId: 281474976715760, task: 2. Send data=233, closed=1, bufferActorId=[4:7593708300094759840:2976] 2026-01-10T12:26:31.400825Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 233 2026-01-10T12:26:31.400845Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-10T12:26:31.400906Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:59:1]). lockId=281474976715751. 
ActorId=[4:7593708300094759852:2976] 2026-01-10T12:26:31.400990Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Open: token=0 2026-01-10T12:26:31.401181Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Write: token=0 2026-01-10T12:26:31.401297Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Close: token=0 2026-01-10T12:26:31.401351Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [4:7593708300094759851:3027], TxId: 281474976715760, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7593708300094759840:2976] 2026-01-10T12:26:31.401370Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [4:7593708300094759851:3027], TxId: 281474976715760, task: 2. Finished 2026-01-10T12:26:31.401395Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:31.401436Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708300094759848:3027], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxyp8v9sheaehv1davjmxs. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2NmZDI5YTQtNTg5NmEyMDYtOWFlZjY2N2MtZTJmNzQ2Njg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:31.401575Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715760;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:31.401930Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3486: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Start prepare for distributed commit 2026-01-10T12:26:31.401954Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1067: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]SetPrepare; txId=281474976715760 2026-01-10T12:26:31.401969Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Flush data 2026-01-10T12:26:31.402124Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Send EvWrite to ShardID=72075186224037900, isPrepare=1, isImmediateCommit=0, TxId=281474976715760, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715751 DataShard: 72075186224037900 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 59, Size=320, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=320 2026-01-10T12:26:31.402236Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Send EvWrite (external) to ShardID=72075186224037893, isPrepare=1, isRollback=0, TxId=281474976715760, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715751 DataShard: 72075186224037893 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 52, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-10T12:26:31.402869Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Recv EvWriteResult from ShardID=72075186224037900, Status=STATUS_PREPARED, TxId=281474976715760, Locks= , Cookie=1 2026-01-10T12:26:31.402936Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Flush data 2026-01-10T12:26:31.402972Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4150: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Recv EvWriteResult (external) from ShardID=72075186224037893, Status=STATUS_PREPARED, TxId=281474976715760, Locks= , Cookie=0 2026-01-10T12:26:31.402994Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4468: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Got prepared result TxId=281474976715760, TabletId=72075186224037893, Cookie=0 2026-01-10T12:26:31.403036Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3532: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Start distributed commit with TxId=281474976715760 2026-01-10T12:26:31.403058Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1075: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]SetDistributedCommit; txId=281474976715760 2026-01-10T12:26:31.403094Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3719: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2026-01-10T12:26:31.403233Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:3796: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Got transaction status, status: 16 2026-01-10T12:26:31.413709Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3796: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Got transaction status, status: 17 2026-01-10T12:26:31.417047Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4182: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Recv EvWriteResult (external) from ShardID=72075186224037893, Status=STATUS_COMPLETED, TxId=281474976715760, Locks= , Cookie=0 2026-01-10T12:26:31.417080Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4497: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Got completed result TxId=281474976715760, TabletId=72075186224037893, Cookie=0, Locks= 2026-01-10T12:26:31.417330Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Recv EvWriteResult from ShardID=72075186224037900, Status=STATUS_COMPLETED, TxId=281474976715760, Locks= , Cookie=0 2026-01-10T12:26:31.417359Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7593708300094759852:2976], Table: `Root/yq/connections` ([72057594046644480:59:1]), SessionActorId: [4:7593708291504824977:2976]Got completed result TxId=281474976715760, TabletId=72075186224037900, Cookie=0, Mode=2, Locks= 2026-01-10T12:26:31.417384Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [4:7593708300094759840:2976], SessionActorId: [4:7593708291504824977:2976], Committed TxId=281474976715760 |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpTypes::QuerySpecialTypes >> KqpStats::OneShardLocalExec+UseSink >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 16788, MsgBus: 12035 2026-01-10T12:26:03.437095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708179567081415:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:03.437194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:03.789644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:03.815357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:03.815505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:03.873655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:03.896798Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708179567081375:2081] 1768047963427830 != 1768047963427833 2026-01-10T12:26:03.922124Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:04.058255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:04.174578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:04.174598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:04.174656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:04.174754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:04.452586Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:04.903997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:04.984509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.158204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.347374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.428574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.182106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708196746952441:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.182233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.188491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708196746952451:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.188616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.840167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.890235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.925151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.953201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.993602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.050334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.090400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.143150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.255804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708201041920629:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.255894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.256475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708201041920635:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.256844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708201041920633:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.256889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.261689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:08.278692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708201041920638:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:08.378009Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708201041920689:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:08.437804Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708179567081415:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:08.437872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:10.759196Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047970731, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 6311, MsgBus: 63729 2026-01-10T12:26:11.668839Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;acto ... to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.232332Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.317277Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.354050Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.399404Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.442319Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.482841Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.534686Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.584313Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.676449Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.785658Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708296750091288:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.785784Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.786476Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708296750091293:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.786484Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708296750091294:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.786546Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.791809Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:30.805617Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708296750091297:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:30.886814Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708296750091348:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:31.065287Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708279570219387:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:31.065376Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:33.064315Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768047993082, txId: 281474976710673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":637,"Sum":637,"History":[3,637],"Min":637},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,3,1048576],"Min":1048576},"Introspections":["1 tasks for a single\/sequential source 
scan"],"Tasks":1,"FinishedTasks":1,"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1768047993041,"Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"CpuTimeUs":{"Count":1,"Max":891,"Sum":891,"History":[0,721,2,891],"Min":891},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[3,144],"Min":144},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[3,144],"Min":144},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1320,"Sum":1320,"History":[3,1320],"Min":1320},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"UpdateTimeMs":2}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,3,1048576],"Min":1048576},"Introspections":["1 minimum tasks for compute"],"InputBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":0,"BaseTimeMs":1768047993041,"CpuTimeUs":{"Count":1,"Max":565,"Sum":565,"History":[1,479,3,565],"Min":565},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[3,37],"Min":37},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":637,"Sum":637,"History":[3,637],"Min":637},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"UpdateTimeMs":2}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":219006,"ProcessCpuTimeUs":341,"Compilation":{"FromCache":false,"CpuTimeUs":187489,"DurationUs":193651}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.565,"A-Cpu":1.456,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-SelfCpu":0.891,"A-Cpu":0.891,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2026-01-10T12:25:39.091160Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708077216582659:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:39.091260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:39.714962Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] E0110 12:25:39.731523206 457862 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:39.731630208 457862 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:39.732897Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.771012Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.779816Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.839915Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.840800Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.856098Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.856194Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.858380Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.963464Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.987664Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.987757Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:39.999746Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.052804Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.065640Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.069433Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21425 2026-01-10T12:25:40.069993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:40.124625Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:40.131047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:40.219950Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.223972Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.225685Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.227704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:40.240203Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.241144Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.249075Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.286830Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.294183Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.294301Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.294971Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.296973Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21425 } ] 2026-01-10T12:25:40.304662Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Roo ... 0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:31.886605Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Received TEvResolveKeySetResult update for table 'Root/yq/queries' 2026-01-10T12:26:31.886670Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Processing resolved ShardId# 72075186224037894, partition range: [(String : yandexcloud://Execute_folder_id, String : utqudnv1034t08q8mab4) ; ()), i: 0, state ranges: 0, points: 1 2026-01-10T12:26:31.886689Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Add point to new shardId: 72075186224037894 2026-01-10T12:26:31.886796Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Pending shards States: TShardState{ TabletId: 72075186224037894, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utqudnv1034t08q8mab4)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utqudnv1034t08q8mab4)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:31.886811Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:31.886823Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. BEFORE: 1.0 2026-01-10T12:26:31.886873Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Send EvRead to shardId: 72075186224037894, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-10T12:26:31.886917Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. AFTER: 0.1 2026-01-10T12:26:31.886932Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:31.887587Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Recv TEvReadResult from ShardID=72075186224037894, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:31.887607Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Taken 0 locks 2026-01-10T12:26:31.887620Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. 
new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:31.887650Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708301158839359:3004], TxId: 281474976710746, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxypng83zdy43tpsh0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:31.887671Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708301158839359:3004], TxId: 281474976710746, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxypng83zdy43tpsh0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:31.887690Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:31.887709Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. enter pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:31.887724Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. exit pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:31.887725Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:144: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Start compute actor [4:7593708301158839360:3005], task: 2 2026-01-10T12:26:31.887736Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. returned 0 rows; processed 0 rows 2026-01-10T12:26:31.887744Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:155: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Set execution timeout 299.589277s 2026-01-10T12:26:31.887786Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. dropping batch for read #0 2026-01-10T12:26:31.887799Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:31.887812Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:31.887830Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710746, task: 1, CA Id [4:7593708301158839359:3004]. 
returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:31.887904Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7593708301158839359:3004], TxId: 281474976710746, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxypng83zdy43tpsh0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:31.891846Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-10T12:26:31.892094Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708301158839359 RawX2: 4503616807242684 } } DstEndpoint { ActorId { RawX1: 7593708301158839360 RawX2: 4503616807242685 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708301158839360 RawX2: 4503616807242685 } } DstEndpoint { ActorId { RawX1: 7593708301158839355 RawX2: 4503616807242307 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:31.892114Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7593708301158839359:3004] 2026-01-10T12:26:31.892218Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708301158839360:3005], TxId: 281474976710746, task: 2. Ctx: { TraceId : 01kekxypng83zdy43tpsh0hxas. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:31.892375Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710746;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:31.892559Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7593708301158839359:3004], TxId: 281474976710746, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxypng83zdy43tpsh0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:31.892605Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7593708301158839359:3004], TxId: 281474976710746, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxypng83zdy43tpsh0hxas. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjNiMDFhZGQtNzYwOGVmNmItZGZjMDE0MDgtNWNmN2MyNDM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:31.892795Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710746;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:31.899455Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudnv1034t08q8mab4" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] Test command err: 2026-01-10T12:26:04.217647Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.250545Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.250854Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.253232Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.253647Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:26:04.254959Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:26:04.255025Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.256215Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2026-01-10T12:26:04.256263Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.256387Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.256565Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.271250Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.271342Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.273729Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.273931Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274084Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274239Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] 
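The long run of TRANSPORT_UNAVAILABLE warnings at the start of the Yq_1::DeleteQuery output above comes from the control-plane storage repeatedly retrying `Create directory "Root/yq"` and `Create table "Root/yq/..."` while the local gRPC endpoint (localhost:21425) still refuses connections; once the server side comes up the retries stop and the test finishes with [GOOD]. A minimal sketch of that retry-until-deadline loop in plain C++ — hypothetical names and a fixed backoff, not the actual YDB control-plane code:

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Keep invoking `attempt` until it succeeds or `deadline` passes,
// sleeping `backoff` between tries (a fixed backoff keeps the sketch short).
bool RetryUntilDeadline(const std::function<bool()>& attempt,
                        std::chrono::steady_clock::time_point deadline,
                        std::chrono::milliseconds backoff) {
    while (std::chrono::steady_clock::now() < deadline) {
        if (attempt()) {
            return true;
        }
        std::this_thread::sleep_for(backoff);
    }
    return false;
}

int main() {
    using namespace std::chrono;
    int calls = 0;
    // Stand-in for "create table over gRPC": fails (connection refused)
    // for the first few calls, then succeeds once the endpoint is "up".
    auto createTable = [&calls] {
        ++calls;
        bool up = calls >= 4;
        std::cerr << (up ? "Create table: OK\n"
                         : "Create table: TRANSPORT_UNAVAILABLE, will retry\n");
        return up;
    };
    bool ok = RetryUntilDeadline(createTable,
                                 steady_clock::now() + seconds(5),
                                 milliseconds(50));
    std::cout << (ok ? "schema ready after " : "gave up after ")
              << calls << " attempts\n";
    return ok ? 0 : 1;
}
```

In the real service the attempt would be a gRPC session/table-creation call and the delay would typically grow between tries; the sketch only illustrates the loop structure visible in the repeated timestamps above.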
targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274369Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274518Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274653Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274683Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.274779Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 2026-01-10T12:26:04.274818Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2026-01-10T12:26:04.274870Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.274933Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.275896Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.275989Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.278918Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.279067Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.279403Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.279671Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.280554Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2026-01-10T12:26:04.280590Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.280654Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.280766Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.291343Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.291404Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.293548Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.293722Z node 2 
:BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.293864Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.294017Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.294175Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.294328Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.294467Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.294496Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.294565Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2026-01-10T12:26:04.294597Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2026-01-10T12:26:04.294643Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.294712Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.295148Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.295613Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:04.298141Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2026-01-10T12:26:04.298233Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.298414Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:04.315052Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:26:04.315106Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.316709Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.316751Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:26:04.317226Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.317303Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:26:04.317710Z node 2 :BS_NODE DEBUG: 
{NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.317752Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:98:2089] 2026-01-10T12:26:04.317780Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:98:2089] 2026-01-10T12:26:04.317848Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:04.318064Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:26:04.318099Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:04.318276Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:04.318434Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:26:04.318481Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:26:04.318560Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:04.318665Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:54:2064] 2026-01-10T12:26 ... 
us# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:26:35.286166Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2026-01-10T12:26:35.286246Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:35.286754Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{18, redo 165b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2026-01-10T12:26:35.286833Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:35.287135Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-10T12:26:35.287235Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:35.287353Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:26:35.287446Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:35.287924Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [4d73cb573d4ef239] received {EvVPutResult Status# OK ID# [72075186224037888:2:1:1:28672:89:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 80700 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2026-01-10T12:26:35.288055Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [4d73cb573d4ef239] Result# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2026-01-10T12:26:35.288139Z node 28 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [4d73cb573d4ef239] SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:26:35.288329Z node 28 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:26:35.288482Z node 28 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72075186224037888 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:26:35.288604Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:2} commited cookie 2 for step 1 2026-01-10T12:26:35.288836Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [758c562ff377dcdd] bootstrap ActorId# [28:546:2209] Group# 2147483648 TabletId# 72075186224037888 Channel# 0 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-10T12:26:35.288979Z node 28 :BS_PROXY DEBUG: 
group_sessions.h:193: Send to queueActorId# [28:489:2162] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:0] collect=[2:0] cookie# 0 2026-01-10T12:26:35.289220Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [5be63a1c46f51038] bootstrap ActorId# [28:547:2210] Group# 2147483649 TabletId# 72075186224037888 Channel# 1 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-10T12:26:35.289270Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:182: [5be63a1c46f51038] Keep# [72075186224037888:1:2:1:8192:289:0] Marker# DSPC04 2026-01-10T12:26:35.289379Z node 28 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [28:508:2178] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:1] collect=[2:0] Keep: [72075186224037888:1:2:1:8192:289:0] cookie# 0 2026-01-10T12:26:35.290977Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [758c562ff377dcdd] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2026-01-10T12:26:35.291095Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [758c562ff377dcdd] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2026-01-10T12:26:35.292366Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [5be63a1c46f51038] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2026-01-10T12:26:35.292429Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [5be63a1c46f51038] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2026-01-10T12:26:35.292848Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:548:2316] 2026-01-10T12:26:35.292910Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:548:2316] 2026-01-10T12:26:35.293041Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:35.293127Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-10T12:26:35.293224Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:548:2316] 2026-01-10T12:26:35.293311Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:548:2316] 2026-01-10T12:26:35.293390Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:548:2316] 2026-01-10T12:26:35.293481Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:548:2316] 2026-01-10T12:26:35.293619Z node 27 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:548:2316] 2026-01-10T12:26:35.293801Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:548:2316] 2026-01-10T12:26:35.293872Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:548:2316] 2026-01-10T12:26:35.293931Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:548:2316] 2026-01-10T12:26:35.294022Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:548:2316] 2026-01-10T12:26:35.294085Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:548:2316] 2026-01-10T12:26:35.294181Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:462:2296] EventType# 268959750 2026-01-10T12:26:35.294439Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} queued, type NKikimr::NHive::TTxSyncTablets 2026-01-10T12:26:35.294529Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:35.294657Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:26:35.294749Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:35.294927Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-10T12:26:35.295007Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:35.295114Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:26:35.295198Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:35.295574Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:551:2319] 2026-01-10T12:26:35.295638Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:551:2319] 2026-01-10T12:26:35.295754Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:35.295832Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-10T12:26:35.295913Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:551:2319] 2026-01-10T12:26:35.295986Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown 
[27:551:2319] 2026-01-10T12:26:35.296062Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:551:2319] 2026-01-10T12:26:35.296167Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:551:2319] 2026-01-10T12:26:35.296326Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:551:2319] 2026-01-10T12:26:35.296540Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:551:2319] 2026-01-10T12:26:35.296622Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:551:2319] 2026-01-10T12:26:35.296688Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:551:2319] 2026-01-10T12:26:35.296767Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:551:2319] 2026-01-10T12:26:35.296824Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:551:2319] 2026-01-10T12:26:35.296905Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:550:2318] EventType# 268697616 >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> THiveTest::TestBridgeDisconnect [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:47.697769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:47.697879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.697921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:47.697961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:47.697998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:47.698033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:47.698112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.699048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
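The TEvVCollectGarbage exchange near the end of the THiveTest::TestLockedTabletsMustNotRestart output (CollectGeneration# 2, CollectStep# 0, with an explicit Keep entry for blob [72075186224037888:1:2:1:8192:289:0]) follows the usual barrier scheme: blobs written at or below the collect (generation, step) become collectable unless they are explicitly kept. A simplified model of that rule, using hypothetical types rather than the real dsproxy structures:

```cpp
#include <iostream>
#include <set>
#include <tuple>
#include <vector>

// Minimal stand-in for a blob id: the (generation, step) of the write.
struct BlobId {
    unsigned generation;
    unsigned step;
    bool operator<(const BlobId& o) const {
        return std::tie(generation, step) < std::tie(o.generation, o.step);
    }
};

// Blobs at or below the (gen, step) barrier are collectable,
// unless they are listed in `keep`.
std::vector<BlobId> Collectable(const std::vector<BlobId>& blobs,
                                unsigned barrierGen, unsigned barrierStep,
                                const std::set<BlobId>& keep) {
    std::vector<BlobId> out;
    for (const auto& b : blobs) {
        bool underBarrier = std::tie(b.generation, b.step) <=
                            std::tie(barrierGen, barrierStep);
        if (underBarrier && keep.count(b) == 0) {
            out.push_back(b);
        }
    }
    return out;
}

int main() {
    std::vector<BlobId> blobs = {{1, 1}, {1, 2}, {2, 0}, {2, 1}};
    std::set<BlobId> keep = {{1, 2}};           // explicitly kept, like the log's Keep# entry
    auto gone = Collectable(blobs, 2, 0, keep); // barrier CollectGeneration# 2, CollectStep# 0
    for (const auto& b : gone) {
        std::cout << "collect " << b.generation << ":" << b.step << "\n";
    }
    // Prints "collect 1:1" and "collect 2:0"; 1:2 survives via Keep, 2:1 is above the barrier.
    return 0;
}
```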
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:47.699997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:47.701355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:47.811276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:47.811345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:47.827912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:47.828021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:47.828250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:47.838650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:47.839002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:47.839787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:47.840026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:47.847102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.847278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:47.866947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:47.867207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:47.867259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:47.867584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.053281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.054931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.055660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:26:35.518377Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:26:35.518408Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-10T12:26:35.520152Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-10T12:26:35.520481Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-10T12:26:35.520895Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-10T12:26:35.522429Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-10T12:26:35.522702Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409548 2026-01-10T12:26:35.524388Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-10T12:26:35.524553Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2026-01-10T12:26:35.525430Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:35.525748Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-10T12:26:35.526445Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-10T12:26:35.526658Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-10T12:26:35.526853Z node 17 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-10T12:26:35.527213Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-10T12:26:35.527388Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2026-01-10T12:26:35.529299Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:26:35.529393Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-10T12:26:35.529512Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:26:35.532124Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-10T12:26:35.532215Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-10T12:26:35.534683Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-10T12:26:35.534730Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-10T12:26:35.534985Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:26:35.535018Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:26:35.535564Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:26:35.535633Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:26:35.536259Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:26:35.536623Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:26:35.536705Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:26:35.537342Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:26:35.537481Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: 
got EvNotifyTxCompletionResult 2026-01-10T12:26:35.537548Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:537:2489] TestWaitNotification: OK eventTxId 103 2026-01-10T12:26:35.538284Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:26:35.538582Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 351us result status StatusPathDoesNotExist 2026-01-10T12:26:35.538807Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2026-01-10T12:26:35.539375Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-10T12:26:35.539456Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2026-01-10T12:26:35.539509Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2026-01-10T12:26:35.539570Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2026-01-10T12:26:35.540298Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:26:35.540567Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 316us result status StatusSuccess 2026-01-10T12:26:35.541155Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> KqpLimits::StreamWrite+Allowed >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> Yq_1::CreateQuery_Without_Connection [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> KqpParams::DefaultParameterValue >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> KqpExplain::UpdateConditional-UseSink >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> KqpQueryDiscard::DiscardSelectSupport >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::ExecuteWriteQuery >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> KqpExplain::UpdateSecondaryConditional-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2026-01-10T12:25:38.072069Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708075689680470:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.072157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0110 12:25:38.557683639 456799 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.557792734 456799 channel.cc:120] channel stack builder failed: UNKNOWN: the target 
uri is not valid: dns:/// 2026-01-10T12:25:38.736387Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.740448Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.810926Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.814478Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17849 2026-01-10T12:25:38.814654Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.814787Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.823545Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.920602Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.920721Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.920816Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.920887Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.925720Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.928251Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.928348Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.987917Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.988040Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.988106Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:38.988227Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.026105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.077672Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.077820Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.077903Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.078395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:25:39.104710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.232517Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.234075Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.234538Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.236519Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.239770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:39.271218Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.276357Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17849: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17849 } ] 2026-01-10T12:25:39.279454Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/y ... _OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708316172071986 RawX2: 4503603922340744 } } DstEndpoint { ActorId { RawX1: 7593708316172071987 RawX2: 4503603922340745 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } 2026-01-10T12:26:34.577655Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:34.577666Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:34.577681Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:34.577689Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:34.577706Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:34.577718Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-01-10T12:26:34.577731Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-01-10T12:26:34.577743Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. 
Consider finished 2026-01-10T12:26:34.577752Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:34.577765Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Received TEvResolveKeySetResult update for table 'Root/yq/pending_small' 2026-01-10T12:26:34.577818Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Processing resolved ShardId# 72075186224037899, partition range: [(String : TestTenant, String : NULL, String : NULL) ; ()), i: 0, state ranges: 1, points: 0 2026-01-10T12:26:34.577881Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:655: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Compare range #0 [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] with partition range [(String : TestTenant, String : NULL, String : NULL) ; ()) : 0 2026-01-10T12:26:34.577932Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:662: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Add range to new shardId: 72075186224037899, range: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] 2026-01-10T12:26:34.578064Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Pending shards States: TShardState{ TabletId: 72075186224037899, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-10T12:26:34.578087Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:34.578096Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. BEFORE: 1.0 2026-01-10T12:26:34.578173Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Send EvRead to shardId: 72075186224037899, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-10T12:26:34.578213Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. AFTER: 0.1 2026-01-10T12:26:34.578230Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-10T12:26:34.578923Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. 
Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:34.578954Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Taken 0 locks 2026-01-10T12:26:34.578966Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:34.578986Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:34.579002Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:34.579017Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:34.579034Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:34.579047Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:34.579059Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. returned 0 rows; processed 0 rows 2026-01-10T12:26:34.579090Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. dropping batch for read #0 2026-01-10T12:26:34.579101Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:34.579111Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:34.579126Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710736, task: 1, CA Id [1:7593708316172071986:2952]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:34.579167Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593708316172071986:2952], TxId: 281474976710736, task: 1. Ctx: { TraceId : 01kekxysm527gt0snbbz97gg33. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:34.579334Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710736;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:34.579482Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7593708316172071987:2953], TxId: 281474976710736, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxysm527gt0snbbz97gg33. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:34.579529Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7593708316172071987:2953], TxId: 281474976710736, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxysm527gt0snbbz97gg33. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWEzNTkwZGEtZmI2ZTM2OWYtNTA4MDU1NzktNTE4MDg2NDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:34.579609Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710736;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; [good] Yq_1::CreateQuery_Without_Connection |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> KqpTypes::UnsafeTimestampCastV0 >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::OneShardLocalExec-UseSink >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap >> THiveTest::TestStopTenant [GOOD] >> THiveTest::TestTabletAvailability ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 29931, MsgBus: 1639 2026-01-10T12:26:03.425183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708182200340499:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:03.425245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:03.780188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:03.835585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:03.835709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:03.903352Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:03.927052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:04.024057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:04.177608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:04.177628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:04.177660Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:04.177738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:04.444462Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:04.937665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:05.014250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.177124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.361102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.481058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.206287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708199380211533:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.206408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.207009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708199380211543:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.207067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.826728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.881349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.925404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.969512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.003583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.051639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.091894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.148403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.250945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708203675179714:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.251021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.251096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708203675179719:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.251205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708203675179721:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.251232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.257251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:08.270920Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708203675179723:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:08.375240Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708203675179774:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:08.426777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708182200340499:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:08.426848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:10.758928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Trying to start YDB, gRPC: 26136, MsgBus: 7786 2026-01-10T12:26:11.949666Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered; ... ion.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:26:30.275594Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:33.510927Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708310052581126:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.510931Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708310052581117:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.511054Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.511378Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708310052581132:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.511477Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.515429Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:33.526593Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708310052581131:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-10T12:26:33.617022Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708310052581184:2620] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:33.650832Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:33.900368Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.117935Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:34.129721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:34.272232Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708292872711011:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:34.272327Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9968, MsgBus: 21314 2026-01-10T12:26:35.267561Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708316490680435:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:35.267629Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:35.292584Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:35.397302Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708316490680407:2081] 1768047995267033 != 1768047995267036 2026-01-10T12:26:35.408395Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:35.408478Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-10T12:26:35.410402Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:35.425952Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:35.425978Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:35.448517Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:35.469552Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:35.476528Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:35.476552Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:35.476558Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:35.476622Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:36.026287Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:36.092409Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:36.278080Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:39.820790Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333670550461:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.821226Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.822140Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333670550489:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.836147Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:39.853777Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708333670550491:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:26:39.942572Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708333670550553:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:39.983638Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.206529Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708337965517978:2350], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2026-01-10T12:26:40.208197Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=Y2E1YTZiNzItMTNmMjQ2YTMtM2QxOGVmODYtZGViOTNmNjA=, ActorId: [5:7593708337965517976:2349], ActorState: ExecuteState, LegacyTraceId: 01kekxyz5k5b8e4ea3c3yy8f7f, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 5 column: 49 } message: "Creating table with data is not supported." end_position { row: 5 column: 49 } severity: 1 } } tx_id# trace_id# 2026-01-10T12:26:40.268230Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708316490680435:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:40.268349Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> KqpExplain::FullOuterJoin [GOOD] >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> THiveTest::TestTabletAvailability [GOOD] >> THiveTest::TestSetDomain >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:12.011427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:12.118102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:12.126247Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:12.126691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:12.126786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:12.506417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:12.612992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:12.613158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:12.648744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:12.737956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:13.449681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:13.451383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:13.451442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:13.451483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:13.451668Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:13.524983Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:14.127804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:17.444263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:17.452000Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:17.454581Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:17.485609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:17.485760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:17.514696Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:17.516936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:17.722598Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:17.722686Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:17.724243Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.724933Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.725664Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.726594Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.726939Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.727090Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.727255Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.727420Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.727508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:17.744297Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:17.976339Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:18.025027Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:18.025149Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:18.060949Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:18.062916Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:18.063215Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:18.063307Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:18.063383Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:18.063460Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:18.063526Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:18.063589Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:18.064346Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:18.089912Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:18.090036Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:18.117869Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:24:18.119188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:24:18.191600Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:24:18.196000Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:18.220491Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:24:18.220580Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:24:18.220699Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:18.234192Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:18.258289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:18.282917Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:18.283057Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:18.293745Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:18.483356Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:18.504785Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:18.838232Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:18.938061Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:18.938155Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:24:19.680347Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... or_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5981:4676], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 2026-01-10T12:25:42.730805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:45.050635Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:45.050881Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2026-01-10T12:25:45.051010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-10T12:25:48.394703Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:50.601871Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:50.602056Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2026-01-10T12:25:50.602274Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-10T12:25:53.996004Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:25:54.160711Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:25:54.160815Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:54.160872Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:25:54.160932Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:25:56.448751Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:25:56.448967Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-10T12:25:56.449137Z node 2 
:STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-10T12:25:56.460716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:56.460825Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:59.878120Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:02.123120Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:02.123308Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-10T12:26:02.123444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-10T12:26:05.472713Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:07.939461Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:07.939641Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-10T12:26:07.939786Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-10T12:26:11.436578Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:13.767242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:13.767485Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2026-01-10T12:26:13.767651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-10T12:26:13.811469Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:26:13.811558Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:26:13.811742Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:26:13.825878Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:26:17.116661Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:19.274678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:19.274856Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 9 2026-01-10T12:26:19.274992Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-10T12:26:19.285834Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:26:19.285917Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 
2026-01-10T12:26:19.286131Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:26:19.311897Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:26:22.652600Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:24.747140Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:24.747312Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 10 2026-01-10T12:26:24.747445Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-10T12:26:24.769181Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:26:24.769270Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:26:24.769456Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:26:24.786299Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:26:28.330521Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:28.500966Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:26:28.501049Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:26:28.501087Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:26:28.501123Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:26:30.828738Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:30.828934Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-10T12:26:30.829095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-10T12:26:30.862775Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:26:30.862858Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:26:30.863074Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:26:30.887535Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... 
blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_COLUMNSHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... waiting for stats update from SchemeShard 2026-01-10T12:26:34.437852Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:36.626004Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:36.626291Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-10T12:26:36.626427Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-10T12:26:36.637313Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:26:36.637427Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:26:36.637655Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:26:36.653472Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-10T12:26:39.912676Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:26:42.328926Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:26:42.329189Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 ... 
waiting for TEvPropagateStatistics (done) 2026-01-10T12:26:42.329673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-10T12:26:42.329784Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7753:5431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:26:42.330193Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:26:42.330256Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:7753:5431], StatRequests.size() = 1 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> THiveTest::TestSetDomain [GOOD] >> THiveTest::TestSetDomainAlready ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23132, MsgBus: 4204 2026-01-10T12:26:03.424681Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708183136486289:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:03.424744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:03.479334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:03.935223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:03.967187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708183136486258:2081] 1768047963420796 != 1768047963420799 2026-01-10T12:26:03.969988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:03.970115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:03.973820Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:03.981085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:04.146188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:04.176695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:04.176721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:04.176736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-10T12:26:04.176823Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:04.451135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:04.942108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:05.015024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.174521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.335293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.414256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.163716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708200316357316:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.163802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.164028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708200316357325:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.164052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.826746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.879767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.921523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.955314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.991568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.029963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.072545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.135865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.230663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708204611325495:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.230744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.231001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708204611325500:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.231042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708204611325501:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.231301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.240378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:08.252641Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708204611325504:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:08.319886Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708204611325555:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:08.425308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708183136486289:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:08.425386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iter ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:35.325221Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:35.325338Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:35.364056Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:35.367872Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:35.447926Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:35.447969Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:35.447978Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:35.448090Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:35.451150Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:35.968114Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:35.986420Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.055268Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.195120Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:36.250948Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.319779Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.382741Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333949055751:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.382814Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.383154Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333949055760:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.383186Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.482992Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.535372Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.585447Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.625101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.674444Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.718202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.761697Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.830176Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.953602Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333949056628:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.953674Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.953909Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333949056633:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.953949Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708333949056634:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.953991Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.958098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:39.973508Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708333949056637:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:40.068292Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708338244023984:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:40.183847Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708316769184724:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:40.183932Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:41.895580Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.177511Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.220915Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2026-01-10T12:20:50.037079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593706838180005358:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:50.037145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:20:50.129243Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:20:50.600162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:20:50.634969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:20:50.635049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:20:50.758258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:20:50.819598Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:20:50.856477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:20:51.052429Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:20:51.110691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/0019a0/r3tmp/yandexYenqE0.tmp 2026-01-10T12:20:51.110721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/0019a0/r3tmp/yandexYenqE0.tmp 2026-01-10T12:20:51.110856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/0019a0/r3tmp/yandexYenqE0.tmp 2026-01-10T12:20:51.110956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:20:51.353669Z INFO: TTestServer started on Port 30338 GrpcPort 2605 PQClient connected to localhost:2605 2026-01-10T12:20:51.671865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:20:51.687466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:20:51.697914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:20:51.772360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.295136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706855359875526:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.295282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.320197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706855359875539:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.320341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593706855359875540:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.320519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:20:54.338066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:20:54.360261Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593706855359875543:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:20:54.442509Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593706855359875607:2643] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:20:54.749951Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593706855359875615:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:20:54.752837Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDIxN2NjMTEtZWY5NWE2ZTQtZmFjNGI1N2YtNjBmNDYxMmI=, ActorId: [1:7593706855359875524:2331], ActorState: ExecuteState, LegacyTraceId: 01kekxmdcm112fs0g59h8fnevp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:20:54.755554Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:20:54.775258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.812857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:20:54.917420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-10T12:20:55.037039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593706838180005358:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:20:55.037110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7593706859654843188:2821] === CheckClustersList. Ok 2026-01-10T12:21:01.424023Z :WriteToTopic_Demo_11_Table INFO: TTopicSdkTestSetup started 2026-01-10T12:21:01.442315Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-10T12:21:01.471901Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7593706885424647203:2938] connected; active server actors: 1 2026-01-10T12:21:01.473254Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-10T12:21:01.474968Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-10T12:21:01.477186Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-10T12:21:01.483673Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:21:01.496451Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:21:01.497579Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-10T12:21:01.497620Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:21:01.497661Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx wri ... 
partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.718606Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.718676Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.718689Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.718701Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.718724Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.718735Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:26:40.718768Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:26:40.718784Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.718795Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.718811Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.718831Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.719406Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:26:40.719431Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.719444Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.719460Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.719472Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-10T12:26:40.755642Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.755702Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.755718Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.755743Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.755784Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:26:40.820223Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:26:40.820222Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.820251Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820266Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820269Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.820282Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.820292Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820302Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820318Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.820318Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:26:40.820367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:26:40.820367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:26:40.820381Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820383Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820393Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.820398Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.820411Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820413Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.820422Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.820425Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-10T12:26:40.857994Z node 14 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.858039Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.858057Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.858096Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.858115Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:26:40.920171Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:26:40.920181Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.920208Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920210Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920226Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.920226Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.920251Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920251Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920270Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-10T12:26:40.920270Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.920648Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:26:40.920674Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920688Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.920706Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920719Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-10T12:26:40.920763Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-10T12:26:40.920777Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920789Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.920804Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.920814Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-10T12:26:40.960029Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:40.960092Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.960111Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:40.960137Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:40.960154Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |94.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestDeleteTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-10T12:25:08.873446Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1768047908873418 2026-01-10T12:25:09.150084Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707950312986544:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.151358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:09.199076Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:09.214654Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707947263737149:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.214839Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:09.231820Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:25:09.373180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:09.389257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-10T12:25:09.521319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:09.521434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:09.522292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:09.522372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:09.549504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:09.576387Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:09.585523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:09.587511Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:25:09.588246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:09.614005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:09.748483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002e77/r3tmp/yandexuhxGWX.tmp 2026-01-10T12:25:09.748505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002e77/r3tmp/yandexuhxGWX.tmp 2026-01-10T12:25:09.748717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002e77/r3tmp/yandexuhxGWX.tmp 2026-01-10T12:25:09.748801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:09.808422Z INFO: TTestServer started on Port 2845 GrpcPort 27919 PQClient connected to localhost:27919 2026-01-10T12:25:10.009052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:10.165079Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.165676Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:10.229095Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:12.395263Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707963197889768:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:12.395400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707963197889760:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:12.395787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:12.400142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707963197889776:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:12.400226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:12.402155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:12.435722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707963197889775:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:25:12.556572Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707963197889869:2993] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:12.872524Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593707963197889879:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:12.875578Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7593707960148639285:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:25:12.882101Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODcxNjg3OGQtYTAyNTFiMzktYWM2OWE4NWMtZDIxYjU0ZQ==, ActorId: [2:7593707960148639258:2301], ActorState: ExecuteState, LegacyTraceId: 01kekxw9jy6d8gz841m98rcw60, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:12.882006Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmJmNWMzNDYtOTY2ZTViZTAtOTU2OWVjMTQtYWNhM2ZmYjQ=, ActorId: [1:7593707963197889758:2329], ActorState: ExecuteState, LegacyTraceId: 01kekxw9e87avfwchkxgvtphrs, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:25:12.885775Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:12.885775Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:25:12.904938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, b ... mestampMS: 1768047997290 CreateTimestampMS: 1768047997242 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1768047997291 CreateTimestampMS: 1768047997242 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 4 WriteTimestampMS: 1768047997291 CreateTimestampMS: 1768047997242 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551388 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-10T12:26:42.327544Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-10T12:26:42.327601Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid c30b1d35-abc49c57-d9898c47-3f4b92ac has messages 1 2026-01-10T12:26:42.327749Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 read done: guid# c30b1d35-abc49c57-d9898c47-3f4b92ac, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2026-01-10T12:26:42.327785Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 response to read: guid# c30b1d35-abc49c57-d9898c47-3f4b92ac 2026-01-10T12:26:42.328019Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 Process answer. Aval parts: 0 2026-01-10T12:26:42.328897Z :DEBUG: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:26:42.329136Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-01-10T12:26:42.329123Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 grpc read done: success# 1, data# { read { } } 2026-01-10T12:26:42.329283Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 got read request: guid# dc787308-f93c1f55-7679b41e-60bbcfb4 2026-01-10T12:26:42.329329Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-10T12:26:42.329399Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-10T12:26:42.329435Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-10T12:26:42.329462Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-10T12:26:42.329518Z :DEBUG: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-01-10T12:26:42.329743Z :INFO: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] Closing read session. 
Close timeout: 0.000000s 2026-01-10T12:26:42.329797Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-10T12:26:42.329854Z :INFO: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 29 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:26:42.329971Z :NOTICE: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:26:42.330025Z :DEBUG: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] [null] Abort session to cluster 2026-01-10T12:26:42.330966Z :NOTICE: [/Root] [/Root] [2d10fa88-e3f8c00f-94838270-33c445cd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:26:42.332014Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0] Write session: close. Timeout = 0 ms 2026-01-10T12:26:42.332063Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0] Write session will now close 2026-01-10T12:26:42.332133Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0] Write session: aborting 2026-01-10T12:26:42.332520Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0] Write session: gracefully shut down, all writes complete 2026-01-10T12:26:42.332563Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0] Write session: destroy 2026-01-10T12:26:42.332941Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 grpc read done: success# 0, data# { } 2026-01-10T12:26:42.332969Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 grpc read failed 2026-01-10T12:26:42.333002Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 grpc closed 2026-01-10T12:26:42.333042Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_7710806044097291999_v1 is DEAD 2026-01-10T12:26:42.336101Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7593708321786602306:2480] destroyed 2026-01-10T12:26:42.336179Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-10T12:26:42.336226Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:42.336249Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.336268Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:42.336295Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.336312Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:26:42.336368Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_7710806044097291999_v1 2026-01-10T12:26:42.336420Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7593708347556406384:2546] destroyed 2026-01-10T12:26:42.336483Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_7710806044097291999_v1 2026-01-10T12:26:42.334303Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0 grpc read done: success: 0 data: 2026-01-10T12:26:42.334344Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0 grpc read failed 2026-01-10T12:26:42.334402Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0 grpc closed 2026-01-10T12:26:42.334441Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|7ce691a1-3859e303-ec28ecce-971101b9_0 is DEAD 2026-01-10T12:26:42.335631Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-10T12:26:42.335740Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593708347556406374:2539] disconnected. 
2026-01-10T12:26:42.335765Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593708347556406374:2539] disconnected; active server actors: 1 2026-01-10T12:26:42.335786Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7593708347556406374:2539] client user disconnected session shared/user_11_1_7710806044097291999_v1 2026-01-10T12:26:42.404611Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:42.404651Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.404673Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:42.404714Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.404733Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:26:42.505170Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:26:42.505218Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.505238Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:26:42.505264Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:26:42.505283Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-10T12:26:42.716029Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1254} ActorId: [11:7593708347556406407:2537] TxId: 281474976715683. Ctx: { TraceId: 01kekxz1790jz984greagh705v, Database: /Root, SessionId: ydb://session/3?node_id=11&id=ZDJkOGY4NWMtNGNlNTE2Y2EtOWVjMzNkNGQtNTUyYjNmODU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-10T12:26:42.716230Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7593708347556406411:2537], TxId: 281474976715683, task: 3. Ctx: { CheckpointId : . TraceId : 01kekxz1790jz984greagh705v. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=ZDJkOGY4NWMtNGNlNTE2Y2EtOWVjMzNkNGQtNTUyYjNmODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7593708347556406407:2537], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |94.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable >> THiveTest::TestSetDomainAlready [GOOD] >> THiveTest::TestSetDomainError >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery |94.9%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::JoinNoStatsScan >> THiveTest::TestSetDomainError [GOOD] >> THiveTest::TestTabletsStartingCounter >> KqpQuery::QueryClientTimeout >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink |94.9%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry >> KqpExplain::UpdateSecondaryConditional+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> KqpTypes::SelectNull [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools >> KqpQuery::YqlSyntaxV0 >> THiveTest::TestTabletsStartingCounter [GOOD] >> KqpExplain::ReadTableRangesFullScan [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot >> KqpExplain::ReadTableRanges |94.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpLimits::ManyPartitionsSorting [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] Test command err: 2026-01-10T12:25:59.696877Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.726132Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.726442Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.727263Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.727643Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:25:59.728788Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:25:59.728850Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.729883Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2026-01-10T12:25:59.729927Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.730039Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.730218Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.743949Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.744012Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.746417Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 
Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.746625Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.746776Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.746912Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.747050Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.747223Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.747362Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.747391Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.747478Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 2026-01-10T12:25:59.747512Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2026-01-10T12:25:59.747560Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.747616Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.748580Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.748669Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.751579Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.751734Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.752066Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.752340Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.753253Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2026-01-10T12:25:59.753288Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.753353Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.753473Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} 
StartRequestReportingThrottler 2026-01-10T12:25:59.763243Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.763305Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.765743Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.765916Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766056Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766223Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766366Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766512Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766641Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.766668Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.766733Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2026-01-10T12:25:59.766766Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2026-01-10T12:25:59.766808Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.766872Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.767283Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.767733Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:25:59.768206Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2026-01-10T12:25:59.768267Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.768406Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:25:59.782960Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:25:59.783037Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.784834Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2026-01-10T12:25:59.784886Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:25:59.785438Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.785495Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:25:59.785880Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.785916Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:98:2089] 2026-01-10T12:25:59.785949Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:98:2089] 2026-01-10T12:25:59.786022Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:25:59.786254Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:25:59.786290Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.786449Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:25:59.786611Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:25:59.786666Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:25:59.786740Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:25:59.786834Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:54:2064] 2026-01-10T12:25 ... 
age_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:49.923305Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:26:49.923389Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:26:49.923459Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:26:49.923539Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.923641Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.923714Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.924144Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [25:325:2304] 2026-01-10T12:26:49.924226Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [25:325:2304] 2026-01-10T12:26:49.924415Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:49.924579Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:49.924732Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:26:49.924816Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:26:49.924907Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.924986Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:26:49.925035Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.925077Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:49.925173Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-10T12:26:49.925305Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [25:325:2304] 2026-01-10T12:26:49.925369Z node 25 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [25:325:2304] 2026-01-10T12:26:49.925766Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:328:2306] 2026-01-10T12:26:49.925831Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:328:2306] 2026-01-10T12:26:49.925906Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:328:2306] 2026-01-10T12:26:49.926041Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:49.926130Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-10T12:26:49.926238Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:328:2306] 2026-01-10T12:26:49.926315Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:328:2306] 2026-01-10T12:26:49.926422Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:328:2306] 2026-01-10T12:26:49.926583Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:328:2306] 2026-01-10T12:26:49.926810Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:328:2306] 2026-01-10T12:26:49.926905Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:328:2306] 2026-01-10T12:26:49.926974Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:328:2306] 2026-01-10T12:26:49.927058Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:328:2306] 2026-01-10T12:26:49.927116Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:328:2306] 2026-01-10T12:26:49.927203Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:327:2305] EventType# 268697624 2026-01-10T12:26:49.927400Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2026-01-10T12:26:49.927493Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:49.927828Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2026-01-10T12:26:49.927930Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:49.939794Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [aeed6b7f2709b4c0] bootstrap ActorId# [25:331:2309] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:126:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 
2026-01-10T12:26:49.940004Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [aeed6b7f2709b4c0] Id# [72057594037927937:2:5:0:0:126:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:26:49.940122Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:5:0:0:126:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:26:49.940231Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG33 2026-01-10T12:26:49.940336Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG32 2026-01-10T12:26:49.940566Z node 25 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [25:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:126:1] FDS# 126 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:26:49.942133Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:126:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80992 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-10T12:26:49.942299Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:26:49.942410Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:26:49.942662Z node 25 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:26:49.942845Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2026-01-10T12:26:49.943303Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:333:2311] 2026-01-10T12:26:49.943364Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:333:2311] 2026-01-10T12:26:49.943494Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:49.943585Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-10T12:26:49.943682Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:333:2311] 2026-01-10T12:26:49.943751Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:333:2311] 
2026-01-10T12:26:49.943823Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:333:2311] 2026-01-10T12:26:49.943899Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:333:2311] 2026-01-10T12:26:49.944025Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:333:2311] 2026-01-10T12:26:49.944238Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:333:2311] 2026-01-10T12:26:49.944317Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:333:2311] 2026-01-10T12:26:49.944383Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:333:2311] 2026-01-10T12:26:49.944498Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:333:2311] 2026-01-10T12:26:49.944562Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:333:2311] 2026-01-10T12:26:49.944645Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:332:2310] EventType# 268830214 >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpQueryDiscard::DiscardSelectSupport [GOOD] >> KqpQueryDiscard::DiscardSelectEnsureExecuted >> Yq_1::Basic_EmptyDict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: Trying to start YDB, gRPC: 19563, MsgBus: 14658 2026-01-10T12:26:03.426795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708179189050477:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:03.426854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:03.797629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:03.817355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:03.817444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:03.888068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:03.966068Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:03.996222Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708179189050444:2081] 1768047963424757 != 1768047963424760 2026-01-10T12:26:04.016795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:04.197080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:04.197109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:04.197137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:04.197243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:04.448446Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:04.889397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:04.982338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.163661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.355298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:05.428161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.282195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708196368921509:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.282313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.282704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708196368921519:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.282739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:07.830692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.906064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.953744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:07.991650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.025005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.094014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.131086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.190892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:08.267607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708200663889691:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.267670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.267989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708200663889696:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.267993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708200663889697:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.268027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:08.272501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:08.283276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708200663889700:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:08.367190Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708200663889751:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:08.427055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708179189050477:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:08.427132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:10.351651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... adService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.487475Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:48.500476Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708376249377347:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:26:48.584865Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708376249377402:5428] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 97262 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 13857 affected_shards: 100 } compilation { duration_us: 177504 cpu_time_us: 169562 } process_cpu_time_us: 296 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":238,\"Max\":83,\"Min\":49},\"Chunks\":{\"Count\":4,\"Sum\":4,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":83,\"Max\":83,\"Min\":83},\"FirstMessageMs\":{\"Count\":4,\"Sum\":238,\"Max\":83,\"Min\":49},\"ActiveMessageMs\":{\"Count\":4,\"Max\":83,\"Min\":49},\"Bytes\":{\"Count\":4,\"Sum\":7600,\"Max\":1933,\"Min\":1862,\"History\":[50,1933,52,3795,55,5728,83,7600]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":81004,\"Max\":81004,\"Min\":81004,\"History\":[83,81004]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":83,\"Min\":2}}},{\"Pop\":{},\"Name\":\"4\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[1,4194304,83,4194304]},\"DurationUs\":{\"Count\":4,\"Sum\":224000,\"Max\":78000,\"Min\":46000},\"Introspections\":[\"4 tasks from DSScanMinimalThreads 
setting\"],\"WaitOutputTimeUs\":{\"Count\":4,\"Sum\":7315,\"Max\":3211,\"Min\":1055,\"History\":[22,4104,55,4104,68,7315,83,7315]},\"Tasks\":4,\"FinishedTasks\":4,\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"StageDurationUs\":80000,\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"BaseTimeMs\":1768048008771,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":213515,\"Max\":74948,\"Min\":43780,\"History\":[22,51544,42,91835,44,111785,50,118427,52,126467,55,138567,68,200533,83,213515]},\"CpuTimeUs\":{\"Count\":4,\"Sum\":8174,\"Max\":2335,\"Min\":1861,\"History\":[1,869,22,2932,42,4309,44,4913,50,5206,52,5560,55,6282,68,6752,83,8174]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":237,\"Max\":82,\"Min\":49},\"ActiveMessageMs\":{\"Count\":4,\"Max\":82,\"Min\":3},\"FirstMessageMs\":{\"Count\":4,\"Sum\":14,\"Max\":5,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,10208,42,17248,44,20416,50,21824,52,23552,55,26368,68,28512,83,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":223,\"Max\":77,\"Min\":46}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":237,\"Max\":82,\"Min\":49},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":237,\"Max\":82,\"Min\":49},\"FirstMessageMs\":{\"Count\":4,\"Sum\":14,\"Max\":5,\"Min\":3},\"ActiveMessageMs\":{\"Count\":4,\"Max\":82,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,10208,42,17248,44,20416,50,21824,52,23552,55,26368,68,28512,83,35200]},\"PauseMessageMs\":{\"Count\":4,\"Sum\":8,\"Max\":3,\"Min\":1},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":223,\"Max\":77,\"Min\":46},\"WaitTimeUs\":{\"Count\":4,\"Sum\":229027,\"Max\":79028,\"Min\":47311,\"History\":[22,58563,42,100743,44,121517,50,128526,52,137085,55,149999,68,214189,83,229027]},\"WaitPeriods\":{\"Count\":4,\"Sum\":5,\"Max\":2,\"Min\":1},\"WaitMessageMs\":{\"Count\":4,\"Max\":82,\"Min\":1}}}],\"UpdateTimeMs\":83}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":88,\"Max\":88,\"Min\":88},\"FirstMessageMs\":{\"Count\":1,\"Sum\":88,\"Max\":88,\"Min\":88},\"Bytes\":{\"Count\":1,\"Sum\":7576,\"Max\":7576,\"Min\":7576,\"History\":[91,7576]}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,91,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":39000,\"Max\":39000,\"Min\":39000},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"InputBytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600},\"Tasks\":1,\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":39000,\"BaseTimeMs\":1768048008771,\"CpuTimeUs\":{\"Count\":1,\"Sum\":5683,\"Max\":5683,\"Min\":5683,\"History\":[2,1074,91,5683]},\"UpdateTimeMs\":88,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":84,\"Max\":84,\"Min\":84},\"ActiveMessageMs\":{\"Count\":1,\"Max\":84,\"Min\":83},\"FirstMessageMs\":{\"Count\":1,\"Sum\":83,\"Max\":83,\"Min\":83},\"Bytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600,\"History\":[91,7600]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":83,\"Max\":83,\"Min\":83},\"Chunks\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":83,\"Max\":83,\"Min\":83},\"FirstMessageMs\":{\"Count\":1,\"Sum\":49,\"Max\":49,\"Min\":49},\"ActiveMessageMs\":{\"Count\":1,\"Max\":83,\"Min\":49},\"Bytes\":{\"Count\":1,\"Sum\":7600,\"Max\":7600,\"Min\":7600,\"History\":[91,7600]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34},\"WaitTimeUs\":{\"Count\":1,\"Sum\":20251,\"Max\":20251,\"Min\":20251,\"History\":[91,20251]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":83,\"Min\":2}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":177504,\"CpuTimeUs\":169562},\"ProcessCpuTimeUs\":296,\"TotalDurationUs\":387921,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":105180},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:38\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'368) \'(\'\"_id\" \'\"3752c29d-fa19142b-f1246183-4e8551a4\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'380) \'(\'\"_id\" \'\"907c910e-a6d68af5-44a06aea-7024e925\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return 
(KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 387921 total_cpu_time_us: 183715 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":38},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1768048008\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"c1f9cc87-ec8d013d-3bbcf39d-9e2375e7\",\"version\":\"1.0\"}" 2026-01-10T12:26:49.147837Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708359069503032:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:49.147927Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestDownCounterDeleteNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:48.626356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:48.626459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:48.626511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:48.626558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:48.626597Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:48.626628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:48.626693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:48.626799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:48.627578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:48.627888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:48.715842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:48.715894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:48.734392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:48.734515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:48.734716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:48.747521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:25:48.747807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:48.748520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:48.748778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:48.751340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:48.751625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:48.752791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:48.752839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:48.752938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:48.752975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:48.753007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:48.753169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.910842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.911841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.911982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.912957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-10T12:26:50.770034Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:26:50.771401Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.771523Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:26:50.775276Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:50.775434Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:50.775498Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:26:50.775557Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-10T12:26:50.775638Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:26:50.776933Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:50.777033Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:26:50.777069Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:26:50.777106Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:26:50.777141Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:26:50.777249Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:26:50.777751Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1776 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:26:50.777802Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:26:50.777978Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1776 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-10T12:26:50.778186Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1776 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-10T12:26:50.779147Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:50.779226Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:26:50.779454Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:50.779577Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-10T12:26:50.779760Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-10T12:26:50.779896Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-10T12:26:50.779975Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.780054Z node 17 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-10T12:26:50.780218Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-10T12:26:50.788040Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:26:50.788285Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:26:50.788410Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.788557Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.788751Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.788815Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:26:50.789047Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:26:50.789115Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:50.789191Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:26:50.789263Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:50.789329Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:26:50.789443Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:708:2697] message: TxId: 102 2026-01-10T12:26:50.789538Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:26:50.789614Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:26:50.789668Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:26:50.789903Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:26:50.792558Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:26:50.792650Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:768:2745] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-10T12:26:50.798972Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction 
{ WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "value" FamilyName: "new_family" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:26:50.799339Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:26:50.799712Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Column families require StorageConfig specification, at schemeshard: 72057594046678944 2026-01-10T12:26:50.804601Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Column families require StorageConfig specification" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:26:50.805025Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column families require StorageConfig specification, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> KqpExplain::UpdateConditionalKey-UseSink >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] >> KqpQuery::QueryCache >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> THiveTest::TestDownCounterDeleteNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2026-01-10T12:25:38.254594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708073380709038:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:38.258187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:38.281776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E0110 12:25:38.373455409 456927 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:25:38.373739305 456927 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:25:38.608651Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.731406Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.731507Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.806507Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.806646Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.806702Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.808206Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.808370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.851269Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.914475Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.918215Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.918616Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.918696Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.942774Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:38.942941Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.112462Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26110 2026-01-10T12:25:39.112644Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.112729Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.112791Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.112938Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.113741Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.177174Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.180317Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.180442Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.260710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:39.348009Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.376401Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.391668Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.391837Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26110: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26110 } ] 2026-01-10T12:25:39.391868Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:1 ... wX1: 7593708376979671718 RawX2: 4503629692144464 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708376979671718 RawX2: 4503629692144464 } } DstEndpoint { ActorId { RawX1: 7593708376979671713 RawX2: 4503629692143909 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:49.994822Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [7:7593708376979671717:2895] 2026-01-10T12:26:49.994859Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:49.994868Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:49.994884Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-10T12:26:49.994986Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708376979671717 RawX2: 4503629692144463 } } DstEndpoint { ActorId { RawX1: 7593708376979671718 RawX2: 4503629692144464 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7593708376979671718 RawX2: 4503629692144464 } } DstEndpoint { ActorId { RawX1: 7593708376979671713 RawX2: 4503629692143909 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-10T12:26:49.995000Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:49.995008Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:49.995021Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:49.995047Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-01-10T12:26:49.995060Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-10T12:26:49.995607Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. 
Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-10T12:26:49.995622Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. Taken 0 locks 2026-01-10T12:26:49.995637Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. new data for read #0 seqno = 1 finished = 1 2026-01-10T12:26:49.995661Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708376979671717:2895], TxId: 281474976715733, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-10T12:26:49.995686Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708376979671717:2895], TxId: 281474976715733, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:49.995701Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-10T12:26:49.995721Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-10T12:26:49.995749Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-10T12:26:49.995769Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. returned 1 rows; processed 1 rows 2026-01-10T12:26:49.995814Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. dropping batch for read #0 2026-01-10T12:26:49.995828Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. effective maxinflight 1024 sorted 0 2026-01-10T12:26:49.995841Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-10T12:26:49.995858Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715733, task: 1, CA Id [7:7593708376979671717:2895]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-10T12:26:49.996005Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7593708376979671717:2895], TxId: 281474976715733, task: 1. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:49.996246Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715733;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-10T12:26:49.996443Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-10T12:26:49.996557Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7593708376979671718:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01kekxz8r551eexhvh9wcx7tsh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=M2I3OTQxOGQtYjE5NTIyY2UtOWJiZDdlZmItNzY2N2M2MzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-10T12:26:49.996659Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715733;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; E0110 12:26:50.005754770 473540 dns_resolver_ares.cc:452] no server name supplied in dns URI E0110 12:26:50.005940763 473540 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-10T12:26:50.052671Z node 7 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:16996: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:16996 >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn >> KqpStats::DataQueryWithEffects+UseSink >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 8833, MsgBus: 22372 2026-01-10T12:26:24.438585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708270475028819:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:24.438737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:24.703632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:24.703761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:24.740008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:24.753427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:24.753961Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:24.755680Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708270475028705:2081] 1768047984431968 != 1768047984431971 2026-01-10T12:26:24.857062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:24.857083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:24.857094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:24.857218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:24.953770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:25.365744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:25.371542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:25.433307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:25.446340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:25.573225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:25.744700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:25.828640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:27.929918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708283359932469:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:27.930181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:27.932271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708283359932479:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:27.932336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.293469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.336687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.380405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.416638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.452703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.512839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.549255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.591613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.665335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708287654900644:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.665428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.665797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708287654900649:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.665848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708287654900650:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.666156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:28.669954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:28.682065Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708287654900653:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:28.737585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708287654900704:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:29.436544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708270475028819:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:29.436630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11659, MsgBus: 1887 2026-01-10T12:26:31.725397Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... peResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:47.310425Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593708369773411845:2334], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-10T12:26:47.310914Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YWQ5ZWYyZTYtOTAwMjRmNDEtYmQ3OWExYTUtNDBkZDg1OGQ=, ActorId: [4:7593708369773411732:2322], ActorState: ExecuteState, LegacyTraceId: 01kekxz33a78n27q0ydttg3k9m, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 62 } message: "At function: KiCreateTable!" end_position { row: 6 column: 62 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:26:47.377555Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593708369773411869:2343], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-10T12:26:47.380275Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZDViY2JkMWUtOTcwZTE2NTAtYmIwMDIwYjQtM2VjYWI0NGI=, ActorId: [4:7593708369773411858:2337], ActorState: ExecuteState, LegacyTraceId: 01kekxz64vbhywk2kq204d1vva, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 45 } message: "At function: KiCreateTable!" end_position { row: 6 column: 45 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:26:47.450770Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593708369773411888:2349], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-10T12:26:47.452469Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZTM0Mzk1YjYtZjczMWViOTctOGEzMDYxYzItNGI2ZjU4Yzk=, ActorId: [4:7593708369773411882:2346], ActorState: ExecuteState, LegacyTraceId: 01kekxz674786rcz6v456248mx, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 43 } message: "At function: KiCreateTable!" end_position { row: 6 column: 43 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 16701, MsgBus: 3364 2026-01-10T12:26:48.379948Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708374171233889:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:48.380012Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:48.394192Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:48.499543Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:48.502902Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:48.502991Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:48.504418Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:48.534821Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:48.624150Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:48.624173Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:48.624181Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:48.624269Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:48.753807Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:49.129313Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:49.134673Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:49.145579Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:26:49.198060Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:26:49.386144Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:51.922720Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708387056136674:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.922834Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.923203Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708387056136710:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.923248Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708387056136711:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.923289Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.928012Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:51.940112Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708387056136714:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-10T12:26:52.042810Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708391351104061:2602] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:52.086927Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.349754Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-10T12:26:52.373278Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.633891Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:52.642065Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:52.653190Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestDownCounterDeleteNode [GOOD] Test command err: 2026-01-10T12:26:04.567875Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.599980Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 
1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.600311Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.601276Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.601619Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.602728Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:74:2076] ControllerId# 72057594037932033 2026-01-10T12:26:04.602772Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.602891Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.603027Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.614749Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.614825Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.617661Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:81:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.617838Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:82:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.617977Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:83:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.618136Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:84:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.618297Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:85:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.618447Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:86:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.618579Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:87:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.618609Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.618700Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:74:2076] 2026-01-10T12:26:04.618736Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:74:2076] 2026-01-10T12:26:04.618788Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.618854Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.619506Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.619659Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.622868Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial 
ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.623029Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.623418Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.623710Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:26:04.625347Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:26:04.625423Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.626479Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:98:2078] ControllerId# 72057594037932033 2026-01-10T12:26:04.626525Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.626597Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.626704Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.628545Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:67:2065] 2026-01-10T12:26:04.628590Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:67:2065] 2026-01-10T12:26:04.640023Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.640101Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.641972Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642163Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642309Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642450Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642587Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642728Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 
2026-01-10T12:26:04.642861Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.642911Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.642988Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:98:2078] 2026-01-10T12:26:04.643024Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:98:2078] 2026-01-10T12:26:04.643068Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.643111Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.643996Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.644070Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.647371Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.647518Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.647890Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.648157Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.649172Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-10T12:26:04.649207Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.649275Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.649400Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.659789Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.659849Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.661924Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.662112Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.662285Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 
Marker# DSP01 2026-01-10T12:26:04.662460Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.662641Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.662822Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.662980Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targ ... ID: 72057594037936131 Cookie: 2} 2026-01-10T12:26:53.321407Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.321482Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.321526Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.321604Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-10T12:26:53.321673Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-10T12:26:53.321710Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-10T12:26:53.353689Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [31:79:2064] 2026-01-10T12:26:53.353758Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [31:79:2064] 2026-01-10T12:26:53.353851Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:53.353897Z node 31 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 31 selfDC 2 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:26:53.354058Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:53.354378Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-10T12:26:53.354436Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-10T12:26:53.354478Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-10T12:26:53.354672Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.354743Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: 
ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.354788Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.354871Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-10T12:26:53.354925Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [31:79:2064] 2026-01-10T12:26:53.354956Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [31:79:2064] 2026-01-10T12:26:53.365824Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [30:54:2064] 2026-01-10T12:26:53.365887Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [30:54:2064] 2026-01-10T12:26:53.365985Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:53.366030Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:26:53.366163Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:53.366268Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-10T12:26:53.366339Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-10T12:26:53.366375Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-10T12:26:53.366420Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.366481Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.366523Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.366591Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-10T12:26:53.366643Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [30:54:2064] 2026-01-10T12:26:53.366676Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [30:54:2064] 2026-01-10T12:26:53.444398Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [30:199:2162] 2026-01-10T12:26:53.444458Z node 30 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [30:199:2162] 2026-01-10T12:26:53.444550Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:53.444597Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:26:53.444770Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:53.444914Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-10T12:26:53.444970Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-10T12:26:53.445008Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-10T12:26:53.445054Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.445126Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.445196Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:53.445266Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-10T12:26:53.445334Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-10T12:26:53.445369Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-10T12:26:53.511656Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} queued, type NKikimr::NHive::TTxDeleteNode 2026-01-10T12:26:53.511789Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:26:53.512119Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} hope 1 -> done Change{10, redo 64b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-10T12:26:53.512236Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:26:53.513311Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [30:537:2318] 2026-01-10T12:26:53.513399Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [30:537:2318] 2026-01-10T12:26:53.513496Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [30:537:2318] 2026-01-10T12:26:53.513617Z node 30 
:TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [30:452:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:53.513703Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 30 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:452:2266] 2026-01-10T12:26:53.513802Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [30:537:2318] 2026-01-10T12:26:53.513876Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [30:537:2318] 2026-01-10T12:26:53.513986Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [30:537:2318] 2026-01-10T12:26:53.514161Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [30:537:2318] 2026-01-10T12:26:53.514334Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [30:537:2318] 2026-01-10T12:26:53.514424Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [30:537:2318] 2026-01-10T12:26:53.514501Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [30:537:2318] 2026-01-10T12:26:53.514587Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [30:537:2318] 2026-01-10T12:26:53.514829Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [30:537:2318] 2026-01-10T12:26:53.514930Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [30:536:2317] EventType# 268830214 |95.0%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpParams::EmptyListForListParameterExecuteDataQuery [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpQuery::YqlTableSample >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink |95.0%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpParams::MissingParameter >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:24:04.185326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:04.310119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:04.319533Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:04.319957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:04.320048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:04.742266Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:04.872398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:04.872499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.907365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.003137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:05.749909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:05.751449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:05.751509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:05.751546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:05.751718Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:05.822025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:06.444258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.956500Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.965586Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:09.968558Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:10.010038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:10.010169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:10.045875Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:10.049159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:10.260786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:10.260905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:10.262604Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.263231Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.264007Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.265111Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.265541Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.265841Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.266055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.266366Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.266512Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:10.285118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:10.508634Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:10.558031Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:10.558134Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:10.597177Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:10.598481Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:10.598662Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:10.598722Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:10.598773Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:10.598820Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:10.598868Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:10.598913Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:10.599623Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:10.628894Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.629035Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2126:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.655534Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2173:2625] 2026-01-10T12:24:10.656726Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2173:2625], schemeshard id = 72075186224037897 2026-01-10T12:24:10.729696Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2026-01-10T12:24:10.733062Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:10.752039Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Describe result: PathErrorUnknown 2026-01-10T12:24:10.752126Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Creating table 2026-01-10T12:24:10.752216Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:10.764872Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2320:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.792205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.824315Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.824484Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.838086Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:11.045619Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:11.240111Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:11.395593Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:11.495069Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:11.495165Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2247:2665] Owner: [2:2245:2663]. Column diff is empty, finishing 2026-01-10T12:24:12.302843Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... PathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:45.081580Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4600:4099]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:45.081971Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:24:45.082091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4602:4101] 2026-01-10T12:24:45.082157Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4602:4101] 2026-01-10T12:24:45.082624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4603:4102] 2026-01-10T12:24:45.082756Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4602:4101], server id = [2:4603:4102], tablet id = 72075186224037894, status = OK 2026-01-10T12:24:45.082909Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4603:4102], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:45.083015Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:24:45.083212Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:24:45.083326Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4600:4099], StatRequests.size() = 1 2026-01-10T12:24:45.083426Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:25:46.065167Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxve27a2nwt5nzbdpytjfm", SessionId: ydb://session/3?node_id=2&id=YzdmMTVmZWQtNWNjMWVmOWItYjJiNTAyMy1iZmZkMTYwZg==, Slow query, duration: 61.699158s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value),min(Value),max(Value) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-10T12:25:46.066824Z node 2 
:STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4472:4024], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:25:46.067125Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4472:4024], Start read next stream part 2026-01-10T12:25:46.067655Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-10T12:25:46.067762Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:25:46.071285Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1` 2026-01-10T12:25:46.071410Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], Start read next stream part 2026-01-10T12:25:46.072593Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4472:4024], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:25:46.072697Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4472:4024], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmFlOTg1NzAtMWE2YWY2MWUtYzU4MjQ0ZmItN2JiMzcxNWM=, TxId: 2026-01-10T12:25:46.249519Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4766:4248]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:25:46.249847Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:25:46.249911Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4766:4248], StatRequests.size() = 1 2026-01-10T12:26:52.993662Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxxaaqehqfvmacegdxgrmz", SessionId: ydb://session/3?node_id=2&id=NjQ4MzlmNjQtNWRmNjQ4NjEtYjQ0ZGMwMzUtZGE3YWM5M2I=, Slow query, duration: 66.917225s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n 
StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-10T12:26:52.996304Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:26:52.996443Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], Start read next stream part 2026-01-10T12:26:52.996794Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32200, txId: 18446744073709551615] shutting down 2026-01-10T12:26:52.998921Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4879:4351], ActorId: [2:4882:4353], Starting query actor #1 [2:4883:4354] 2026-01-10T12:26:52.999024Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4882:4353], ActorId: [2:4883:4354], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:26:52.999364Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:26:52.999425Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4462:2462], ActorId: [2:4740:4226], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTk4OGM0YTUtOWExYTNiMGYtODQ3ZGVmM2QtNTIxNjllNzM=, TxId: 2026-01-10T12:26:53.004667Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4882:4353], ActorId: [2:4883:4354], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGM1ZTJmLWUwNWQxMTgyLTNjMGI3ODAxLTVhMGYxYWZk, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:26:53.138421Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4896:4366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:26:53.138894Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:26:53.138967Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4896:4366], StatRequests.size() = 1 2026-01-10T12:26:53.339039Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4882:4353], ActorId: [2:4883:4354], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=NGM1ZTJmLWUwNWQxMTgyLTNjMGI3ODAxLTVhMGYxYWZk, TxId: 2026-01-10T12:26:53.339141Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4882:4353], ActorId: [2:4883:4354], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGM1ZTJmLWUwNWQxMTgyLTNjMGI3ODAxLTVhMGYxYWZk, TxId: 2026-01-10T12:26:53.339623Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4879:4351], ActorId: [2:4882:4353], Got response [2:4883:4354] SUCCESS 2026-01-10T12:26:53.340500Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:26:53.379334Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:26:53.379431Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3340:3456] 2026-01-10T12:26:53.380204Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4913:4376]], StatType[ 3 ], StatRequestsCount[ 1 ] 2026-01-10T12:26:53.380591Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:26:53.380659Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-10T12:26:53.380958Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:26:53.381024Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-10T12:26:53.381093Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 2 ] 2026-01-10T12:26:53.390677Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> THiveTest::TestBridgeDisconnectWithReboots [GOOD] >> THiveTest::TestBridgeDemotion >> KqpLimits::BigParameter >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal >> KqpQueryDiscard::DiscardSelectEnsureExecuted [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> KqpExplain::UpdateOnSecondary+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink >> KqpPg::LongDomainName [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> TTxAllocatorClientTest::ZeroRange [GOOD] >> KqpQuery::RewriteIfPresentToMap >> KqpParams::BadParameterType 
[GOOD] >> KqpParams::CheckCacheByAst >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2026-01-10T12:25:30.792981Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-10T12:25:30.794943Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-10T12:25:30.797430Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-10T12:25:30.811891Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.815828Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-10T12:25:30.835230Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835379Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835509Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-10T12:25:30.835686Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835784Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.835939Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-10T12:25:30.836123Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-10T12:25:30.841494Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-10T12:25:30.847647Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847721Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:25:30.847827Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-10T12:25:30.847884Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 17812, MsgBus: 16908 2026-01-10T12:24:55.860835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707888402010981:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.860965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:56.060783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.135564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.135715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.181163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.182796Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707888402010936:2081] 1768047895857090 != 1768047895857093 2026-01-10T12:24:56.190031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.282030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.303263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.303288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-01-10T12:24:56.303294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.303379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.697747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:56.868119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.380593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901286913711:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.381056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.381246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901286913724:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.381265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901286913723:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.381291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.386132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.397404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707901286913727:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:24:58.491043Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707901286913778:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22493, MsgBus: 22311 2026-01-10T12:24:59.522230Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593707905820558416:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:59.522287Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:59.532985Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:59.604285Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:59.605464Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593707905820558372:2081] 1768047899521318 != 1768047899521321 2026-01-10T12:24:59.644287Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:59.644379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:59.649368Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:59.687947Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:59.687972Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:59.687989Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:59.688124Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:59.726966Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:00.116838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:00.528536Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:02.278766Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707918705461150:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.278864Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593707918705461142:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.279010Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:02.284287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:02.295313Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593707918705461156:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:25:02.356157Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593707918705461218:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16442, MsgBus: 16650 2026-01-10T12:25:03.303528Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593707924581252789:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:03.303616Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:03.315572Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:03.392034Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:03.421966Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:03.422052Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:03.424958Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:03.458775Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:03.458803Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:03.458812Z node 3 :NET_CLASSIFIER WARN: net ... ccess permissions } 2026-01-10T12:26:47.345039Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708370658782068:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.345142Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.351785Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:47.371008Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7593708370658782067:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:26:47.449050Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7593708370658782120:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:47.495617Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, 
gRPC: 27424, MsgBus: 15298 2026-01-10T12:26:51.499777Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7593708388930977500:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:51.500211Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:51.528860Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:51.691632Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:51.712527Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-10T12:26:51.717586Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:51.717720Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:51.755092Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:51.922615Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:51.922647Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:51.922662Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:51.922795Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:51.925361Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-10T12:26:52.506401Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:52.843014Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:52.853347Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:56.507064Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593708388930977500:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:56.507205Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:57.357313Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708414700782114:2325], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.357477Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.357929Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708414700782126:2328], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.357990Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708414700782127:2329], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.358177Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.364064Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:57.383528Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593708414700782130:2330], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:26:57.469179Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593708414700782181:2545] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:57.513085Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpQuery::CurrentUtcTimestamp >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheTtl >> KqpStats::JoinStatsBasicScan [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryMulti >> THiveTest::TestBridgeDemotion [GOOD] >> THiveTest::TestBridgeBalance ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 26023, MsgBus: 7817 2026-01-10T12:26:26.656979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708281155005098:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:26.657977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:26.883086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:26.883240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:26.939504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:26.956824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:26.961490Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708281155004929:2081] 1768047986646216 != 1768047986646219 2026-01-10T12:26:26.970094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:27.076360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:27.076383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:27.076415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:27.076519Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:27.170086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:27.660905Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:27.661610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:27.675653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:27.750015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:27.923975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.104292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.199456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.087047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708298334875982:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.087138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.087383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708298334875992:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.087419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.468348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.507407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.549269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.582637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.614407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.653310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.711404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.762352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.865613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708298334876880:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.865706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.866133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708298334876885:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.866177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708298334876886:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.866290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.870746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:30.885711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708298334876889:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:30.989682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708298334876940:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:31.656205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708281155005098:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:31.656300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28327, MsgBus: 3180 2026-01-10T12:26:34.085115Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... s: 20173 2026-01-10T12:26:49.756892Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708377996156522:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:49.757027Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:49.772709Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:49.810065Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708377996156273:2081] 1768048009745338 != 1768048009745341 2026-01-10T12:26:49.858437Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:49.866585Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:49.866675Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:49.892517Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:49.998224Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:50.018612Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:50.018640Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:50.018652Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:50.018728Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:50.513596Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:50.521107Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:50.534862Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.600625Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.761207Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:50.773264Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.868159Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.041463Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708399470994620:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.041580Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.042103Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708399470994630:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.042182Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.127112Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.159501Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.199085Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.240228Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.280834Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.333491Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.373556Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.428667Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.539446Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708399470995501:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.539559Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.540006Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708399470995506:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.540057Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708399470995507:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.540111Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:54.544350Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:54.556665Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708399470995510:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:54.653567Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708399470995561:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:54.756895Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708377996156522:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:54.756955Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:00.103908Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768048017484, txId: 281474976710673] shutting down |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::StatsProfile >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> KqpLimits::TooBigQuery+useSink >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] >> KqpLimits::QueryReplySize >> KqpQuery::QueryTimeout >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 32285, MsgBus: 18444 2026-01-10T12:26:25.826782Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708275572244962:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:25.826872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:25.860582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:26.086152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:26.188983Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:26.189119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:26.191816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:26.273221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708275572244847:2081] 1768047985810601 != 1768047985810604 2026-01-10T12:26:26.276320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:26.337548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:26.372923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:26.372954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:26.372965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:26.373051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:26.830910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:26.859346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:26.918954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:27.084958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:27.280810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:27.353017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.316445Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708292752115905:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.316599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.317069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708292752115915:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.317129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.604743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.635637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.671366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.704574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.736621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.776871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.810895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.867449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.943795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708292752116786:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.943912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.944555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708292752116791:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.944626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708292752116792:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.944663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:29.948810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:29.961899Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708292752116795:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:30.061012Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708297047084142:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:30.818345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708275572244962:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:30.818430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20324, MsgBus: 3759 2026-01-10T12:26:32.961134Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... DATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23407, MsgBus: 7606 2026-01-10T12:26:56.053730Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708408383788651:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:56.054411Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:56.083390Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:56.186738Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708408383788594:2081] 1768048016050942 != 1768048016050945 2026-01-10T12:26:56.199373Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:56.200931Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:56.201029Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:56.207803Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:56.296750Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:56.296776Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:56.296785Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:56.296870Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:56.375213Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:56.806750Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:56.812333Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:56.816173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:56.882795Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.055059Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.103713Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:57.146712Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.452381Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708425563659655:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.452495Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.452838Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708425563659665:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.452919Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.542654Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.595726Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.641372Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.693313Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.747772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.808111Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.863374Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.934483Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.044277Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708429858627830:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.044427Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.045069Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708429858627835:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.045131Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708429858627836:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.045307Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.050041Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:01.054471Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708408383788651:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:01.054877Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:01.062609Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708429858627839:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:01.143934Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708429858627893:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> KqpExplain::Predicates [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal >> THiveTest::TestBridgeBalance [GOOD] >> THiveTest::TestBridgeFollowers >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWrite >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 17657, MsgBus: 2560 2026-01-10T12:26:32.577017Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708306506037718:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:32.582422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:32.876363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:32.876480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:32.905547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:32.919027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:32.926702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:32.927026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708306506037671:2081] 1768047992574504 != 1768047992574507 2026-01-10T12:26:33.041489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:33.041531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:33.041544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:33.041685Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:33.132558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:33.479908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:33.591434Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:35.731199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708319390940455:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:35.731266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708319390940446:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:35.731524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:35.731879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708319390940462:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:35.731928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:35.734557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:35.745857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708319390940460:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:26:35.875753Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708319390940513:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:36.176733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.888095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:37.138121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:37.147740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 61641, MsgBus: 1482 2026-01-10T12:26:37.871712Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708325145227334:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:37.871797Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:37.887437Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:37.961959Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:37.972411Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708325145227306:2081] 1768047997870736 != 1768047997870739 2026-01-10T12:26:38.000896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:38.000980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:38.003304Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:38.039365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2026-01-10T12:26:38.039390Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:38.039398Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:38.039477Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:38.135840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:38.441896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:38.877762Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:41.186164Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708342325097372:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.186174Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708342325097380:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.186240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.186536Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708342325097387:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.186591Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.189552Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:41.199543Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708342325097386:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transa ... ype: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156358Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037904 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156421Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037902 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156450Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037898 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156477Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037900 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156524Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156546Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156572Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.156597Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037925 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-10T12:26:59.160895Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:59.168938Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 12374, MsgBus: 4759 2026-01-10T12:27:00.697297Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708425129411743:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:00.697522Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:00.718939Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:00.856325Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:00.864759Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708425129411644:2081] 1768048020692570 != 1768048020692573 2026-01-10T12:27:00.878284Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:00.884376Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:00.884500Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:00.911521Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:01.010549Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:01.018021Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:01.018053Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:01.018064Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:01.018166Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:01.549538Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:01.556549Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:01.559073Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:27:01.633115Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-10T12:27:01.649622Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:27:01.714271Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:05.110168Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708446604249099:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:05.110255Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708446604249110:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:05.110321Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:05.113178Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708446604249114:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:05.113274Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:05.115615Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:05.127558Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708446604249113:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-10T12:27:05.209745Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708446604249166:2603] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:05.243512Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:05.555432Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:05.695936Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708425129411743:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:05.696010Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:05.869750Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:05.878005Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:05.889686Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 6271, MsgBus: 3511 2026-01-10T12:26:27.924842Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708284230307158:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:27.925590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:28.164005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:28.191315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:28.191620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:28.255002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:28.268378Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:28.368715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:28.368738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:28.368745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:28.368828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:28.464054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:28.803177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:28.813120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:28.867558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.950259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:29.063280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.224650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:29.290835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.210251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708301410178164:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.210389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.213151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708301410178174:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.213245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.576569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.613595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.645982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.679220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.710595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.759637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.819584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.862240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.957737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708301410179048:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.957874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.958298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708301410179053:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.958355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708301410179054:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.958503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.962772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:31.979779Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708301410179057:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:32.049200Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708305705146406:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:32.917318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708284230307158:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:32.917380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":" ... ng to start YDB, gRPC: 12597, MsgBus: 27113 2026-01-10T12:26:58.205199Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708416830844494:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:58.205376Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:58.224678Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:58.337073Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:58.361024Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:58.361123Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:58.394313Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:58.469022Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:58.487483Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:58.487506Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:58.487520Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:58.487630Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:59.058244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:59.065483Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:59.084166Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.181777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.295630Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:59.372257Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.460334Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.522158Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708434010715494:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.522269Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.522593Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708434010715504:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.522650Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.649107Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.703015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.754331Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.792373Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.827543Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.871963Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.917469Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.974513Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.073207Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708438305683676:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.073349Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.073659Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708438305683682:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.073716Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708438305683680:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.074059Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.078102Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:03.125801Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708438305683685:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:03.208288Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708416830844494:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:03.208389Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:03.212044Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708438305683736:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:05.323291Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::DdlInDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 7444, MsgBus: 19539 2026-01-10T12:26:35.472195Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708319776482529:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:35.472427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:35.506111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:35.789270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:35.789394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:35.849183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:35.853258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:35.908004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:35.928398Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708319776482504:2081] 1768047995468824 != 1768047995468827 2026-01-10T12:26:36.008893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:36.008929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-01-10T12:26:36.008942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:36.009066Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:36.071980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:36.415557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:36.478498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.479765Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:36.628360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.777955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.839956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:38.574001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708332661386265:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.574097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.574394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708332661386275:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.574427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.932398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:38.978148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.016389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.061485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.098819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.152175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.199568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.306536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.394252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336956354443:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.394348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.394443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336956354448:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.394776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336956354450:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.394820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.398764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:39.412625Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708336956354452:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:39.497810Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708336956354505:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:40.470954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708319776482529:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:40.471020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29893, MsgBus: 63089 2026-01-10T12:26:42.354175Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.592552Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.592722Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.593629Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.593685Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037994;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.594569Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.594596Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037990;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.595413Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.595523Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.596408Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.597251Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.598122Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.598749Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.599901Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 
Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.601219Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.602139Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.602411Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.602980Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.603340Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037992;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.604278Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.605247Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.605754Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.606147Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.606788Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:05.610135Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037964;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpExplain::UpdateOn+UseSink [GOOD] >> KqpExplain::UpdateOn-UseSink >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpParams::MissingOptionalParameter-UseSink >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> THiveTest::TestBridgeFollowers [GOOD] >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> KqpExplain::Explain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 4219, MsgBus: 10434 2026-01-10T12:26:42.497579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708350191161935:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:42.497685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:42.551719Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:42.772050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:42.772218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:42.777205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:42.835326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:42.878121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708350191161896:2081] 1768048002494484 != 1768048002494487 2026-01-10T12:26:42.886654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:43.001164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:43.001197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:43.001208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:43.001293Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:43.120042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:43.448606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:43.513436Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:43.513625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.665527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.817814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.883698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:45.878406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708363076065666:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:45.878538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:45.878895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708363076065676:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:45.878953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.274324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.311027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.354317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.390390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.425269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.467033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.507886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.594178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:46.690234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708367371033849:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.690329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.690734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708367371033855:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.691043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708367371033854:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.691177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:46.694971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:46.713392Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708367371033858:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:46.789725Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708367371033909:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:47.503677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708350191161935:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:47.557880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:48.341346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... 1-10T12:27:08.516571Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.516590Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.520400Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.520469Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.520490Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.525843Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.525937Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.525957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.529508Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.529617Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 
2026-01-10T12:27:08.529639Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.534496Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.534557Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.534571Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.539144Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.539218Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.539235Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.542848Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.542904Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.542920Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.548466Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.548533Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.548553Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.551246Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.551303Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.551318Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.557743Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.557811Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.557835Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.560206Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.560267Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.560288Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.567356Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.567433Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.567472Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.568729Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.568784Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.568801Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.577424Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.577495Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.577513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-10T12:27:08.610859Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708461436681331:2791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:08.610959Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:08.615199Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708461436681333:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:08.615322Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:08.626708Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708461436681342:3864] txid# 281474976710660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpQuery::UpdateThenDelete-UseSink >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard [GOOD] >> KqpQueryDiscard::DiscardSelectMultiLine >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3466, MsgBus: 9928 2026-01-10T12:26:26.904999Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708280612348232:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:26.905903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:27.176460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:27.177140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:27.177234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:27.179458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:27.316000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:27.359955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:27.444770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:27.444812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:27.444819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:27.444893Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:27.916289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:27.980152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:27.993767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:28.039898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.225741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.391990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.452121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.410308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708297792219236:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.410477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.410967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708297792219246:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.411064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:30.740599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.779622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.819280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.850410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.885564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.923753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:30.965267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.025129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.110401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708302087187411:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.110514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.111352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708302087187417:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.111376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708302087187416:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.111399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:31.116325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:31.129737Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708302087187420:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:31.207444Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708302087187471:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:31.905167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708280612348232:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:31.905246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9772, MsgBus: 2918 2026-01-10T12:26:33.992230Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708309352608253:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:33.992299Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:7 ... lterResource ok# false data# peer# 2026-01-10T12:27:09.869782Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85747980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-10T12:27:09.869888Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85746480] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-10T12:27:09.869981Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85745d80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-10T12:27:09.870075Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85745680] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-10T12:27:09.870175Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85744180] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-10T12:27:09.870259Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85743a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-10T12:27:09.870392Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85742c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-10T12:27:09.870451Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85744f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-10T12:27:09.870579Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85742580] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-10T12:27:09.870630Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8585d680] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-10T12:27:09.870757Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8585c880] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-10T12:27:09.870844Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85741e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-10T12:27:09.870963Z node 5 :GRPC_SERVER DEBUG: 
logger.cpp:36: [0x7cbc85741780] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-10T12:27:09.871049Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85741080] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-01-10T12:27:09.871162Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85740980] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-10T12:27:09.871255Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8573fb80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-10T12:27:09.871342Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572f180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-10T12:27:09.871451Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572ea80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-10T12:27:09.871536Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572e380] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-10T12:27:09.871731Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572d580] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-10T12:27:09.871926Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572ce80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-10T12:27:09.873173Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572dc80] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-10T12:27:09.873437Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572c080] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-10T12:27:09.873651Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85528a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-10T12:27:09.873878Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85561880] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-10T12:27:09.873901Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85525980] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-10T12:27:09.874087Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85924080] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-10T12:27:09.874106Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85860080] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-10T12:27:09.874303Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85860e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-10T12:27:09.874323Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8585dd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-10T12:27:09.874484Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85863180] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 
2026-01-10T12:27:09.874537Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85856680] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-10T12:27:09.874699Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85859080] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-10T12:27:09.874745Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85859e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-10T12:27:09.874893Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85832f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-10T12:27:09.874951Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85834480] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-10T12:27:09.875079Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc855ae880] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-10T12:27:09.875144Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc855ab780] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-10T12:27:09.875263Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858b0f80] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-10T12:27:09.875339Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858b1d80] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-10T12:27:09.875465Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858b0180] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-10T12:27:09.875538Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858a8a80] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-10T12:27:09.875656Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858ab480] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-10T12:27:09.875737Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858ac280] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-10T12:27:09.875870Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85aefd80] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-10T12:27:09.875933Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85855180] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-10T12:27:09.875964Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8585cf80] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-10T12:27:09.876133Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85864d80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-10T12:27:09.876170Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858aec80] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-10T12:27:09.876322Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858afa80] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-10T12:27:09.876359Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858a6080] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# 
false data# peer# 2026-01-10T12:27:09.876522Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858ac980] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-10T12:27:09.876551Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858a9f80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-10T12:27:09.876740Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858a7580] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-10T12:27:09.876753Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc858af380] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-10T12:27:09.876940Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8556ab80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-10T12:27:09.876946Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8556b980] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-10T12:27:09.877133Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8556c780] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-10T12:27:09.877139Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85560a80] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-10T12:27:09.877320Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc85524b80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-10T12:27:09.877345Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572b980] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-10T12:27:09.877516Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7cbc8572b280] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] Test command err: 2026-01-10T12:26:18.457435Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:18.488532Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:18.488816Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:18.489690Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:18.490049Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# 
false PDisksWaitingToStart# false 2026-01-10T12:26:18.491154Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:26:18.491222Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:18.492214Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-10T12:26:18.492258Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:18.492374Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:18.492530Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:18.505652Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:18.505713Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:18.508161Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.508364Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.508521Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.508661Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.508824Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.508971Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.509106Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:18.509135Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:18.509218Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-10T12:26:18.509273Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-10T12:26:18.509327Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:18.509389Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:18.510109Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:18.520615Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:18.520685Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:18.522447Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: 
{EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:18.522884Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:18.522928Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:26:18.523030Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:18.523512Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:18.524526Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:18.524779Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-10T12:26:18.524847Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-10T12:26:18.524901Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T12:26:18.524963Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:26:18.524994Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:26:18.525034Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:18.535918Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-10T12:26:18.535968Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-10T12:26:18.536202Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2026-01-10T12:26:18.536255Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:18.536417Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:18.536566Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-10T12:26:18.536627Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-10T12:26:18.536680Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-10T12:26:18.543895Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-10T12:26:18.545244Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: 
{EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:18.545399Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:18.545780Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-10T12:26:18.545841Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-10T12:26:18.546055Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-10T12:26:18.547485Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-10T12:26:18.547552Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-10T12:26:18.553379Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:18.553630Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:18.553835Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T12:26:18.553992Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-10T12:26:18.554055Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-10T12:26:18.554106Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:18.555102Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:18.555162Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-10T12:26:18.555294Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-10T12:26:18.555337Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-10T12:26:18.555355Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-10T12:26:18.555373Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 
Cookie: 2} 2026-01-10T12:26:18.555421Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest: ... tatus: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.130932Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.131004Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:643:2249] followers: 2 2026-01-10T12:27:10.131039Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.131084Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [51:897:2133] 2026-01-10T12:27:10.131110Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [51:897:2133] 2026-01-10T12:27:10.131319Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [51:899:2134] 2026-01-10T12:27:10.131349Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [51:899:2134] 2026-01-10T12:27:10.131401Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:643:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-10T12:27:10.131437Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.131526Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:27:10.131761Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:27:10.131811Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:27:10.131853Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:27:10.132038Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.132145Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.132217Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:643:2249] followers: 2 2026-01-10T12:27:10.132252Z node 51 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.132302Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [51:899:2134] 2026-01-10T12:27:10.132328Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [51:899:2134] 2026-01-10T12:27:10.132542Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [51:901:2135] 2026-01-10T12:27:10.132569Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [51:901:2135] 2026-01-10T12:27:10.132628Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:643:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-10T12:27:10.132661Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.132754Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:27:10.132986Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:27:10.133038Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:27:10.133084Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:27:10.133253Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.133332Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.133398Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:643:2249] followers: 2 2026-01-10T12:27:10.133436Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.133501Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [51:901:2135] 2026-01-10T12:27:10.133528Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [51:901:2135] 2026-01-10T12:27:10.133690Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [51:903:2136] 2026-01-10T12:27:10.133719Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [51:903:2136] 2026-01-10T12:27:10.133766Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: 
StNormal leader: [48:643:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-10T12:27:10.133797Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.133904Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:27:10.134123Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:27:10.134167Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:27:10.134207Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:27:10.134372Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.134448Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.134514Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:643:2249] followers: 2 2026-01-10T12:27:10.134545Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.134612Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [51:903:2136] 2026-01-10T12:27:10.134640Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [51:903:2136] 2026-01-10T12:27:10.134802Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [51:905:2137] 2026-01-10T12:27:10.134829Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [51:905:2137] 2026-01-10T12:27:10.134884Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [48:643:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-10T12:27:10.134916Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.135003Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:27:10.135219Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:27:10.135264Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 
2026-01-10T12:27:10.135299Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:27:10.135458Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.135532Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [48:643:2249] CurrentLeaderTablet: [48:674:2270] CurrentGeneration: 1 CurrentStep: 0} 2026-01-10T12:27:10.135599Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [48:643:2249] followers: 2 2026-01-10T12:27:10.135630Z node 51 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 51 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2026-01-10T12:27:10.135699Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [51:905:2137] 2026-01-10T12:27:10.135726Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [51:905:2137] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage >> KqpStats::RequestUnitForBadRequestExecute >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 65392, MsgBus: 24757 2026-01-10T12:24:55.860824Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707888353922529:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.861230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:56.068188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.113222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.113325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.149791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.235732Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.315053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.315072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:56.315083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.315174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.336864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.685767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 16 2026-01-10T12:24:56.868028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.216961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.374146Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:24:58.382425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.419200Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:24:58.442813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901238825433:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.442813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901238825441:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.442872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.443086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707901238825448:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.443126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.446284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.458054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707901238825447:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:24:58.578537Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707901238825500:2646] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2026-01-10T12:24:59.032275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.066634Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:24:59.071984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.107533Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2026-01-10T12:24:59.361293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.397100Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:24:59.403729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.443161Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2026-01-10T12:24:59.767857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.808131Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:24:59.815883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.856218Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 
8 8 9 9 20 2026-01-10T12:25:00.193520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.238476Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:25:00.244837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.282385Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2026-01-10T12:25:00.608091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.650257Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-10T12:25:00.656872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.860528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707888353922529:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:00.860622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2026-01-10T12:25:01.063866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... 
= 1768048015371739 2026-01-10T12:26:55.656139Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:55.675551Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:55.675601Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:55.732303Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:55.737155Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:55.737187Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:55.737201Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:55.737331Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:55.986239Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:56.403842Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:56.797585Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:00.387384Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7593708405534242991:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:00.387494Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:01.323237Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708431304047488:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.323369Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708431304047498:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.323472Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.324572Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7593708431304047503:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.324705Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.330196Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:01.355751Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7593708431304047502:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:27:01.456751Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7593708431304047559:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:01.501860Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.329952Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Trying to start YDB, gRPC: 22474, MsgBus: 12071 2026-01-10T12:27:04.723466Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7593708441402782820:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:04.733038Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:04.752099Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:04.863903Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:04.872249Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593708441402782699:2081] 1768048024710885 != 1768048024710888 2026-01-10T12:27:04.881402Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:04.881502Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:04.886882Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:05.020914Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:05.020947Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:05.020960Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:05.021075Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:05.054309Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:05.733607Z node 12 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:05.807776Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:09.724210Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708441402782820:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:09.724321Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:10.529317Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708467172587371:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:10.529325Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708467172587382:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:10.529418Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:10.530049Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708467172587386:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:10.530163Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:10.535505Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:10.549840Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708467172587385:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:27:10.645807Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708467172587438:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:10.753561Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpQuery::CreateAsSelect_DisableDataShard [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpQuery::QuerySkipHasNoColumns [GOOD] >> KqpQuery::QueryResultsTruncated >> KqpStats::DeferredEffects-UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpQuery::Now >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> KqpQuery::DdlInDataQuery [GOOD] >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestExternalBoot >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> KqpExplain::UpdateOn-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DdlInDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 21277, MsgBus: 19241 2026-01-10T12:26:45.606458Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708359234926537:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:45.606500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:45.641763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:45.912185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:45.960977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:45.961068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:45.990128Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:46.019475Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:46.032220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708359234926508:2081] 1768048005604360 != 1768048005604363 2026-01-10T12:26:46.099985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:46.100013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:46.100039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:46.100163Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:46.136888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:46.504642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:46.513691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:46.625549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:48.789476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708372119829282:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.789603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.790134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708372119829294:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.790176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708372119829295:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.790280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:48.794908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:48.808574Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708372119829298:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:26:48.913487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708372119829349:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:49.191725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.517436Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2M1NTY4MWUtODUwMmIyNTYtMTYzMzYzYTktNzgwZGVjZmY=, ActorId: [1:7593708376414796759:2340], ActorState: ExecuteState, LegacyTraceId: 01kekxz83x1ybwr6tzwrythyxg, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Decimal value for precision:  status# BAD_REQUEST issues# trace_id# Trying to start YDB, gRPC: 6588, MsgBus: 18348 2026-01-10T12:26:50.620315Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708381511008985:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:50.620468Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:50.675497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:50.727905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:50.727992Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:50.744249Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708381511008946:2081] 1768048010618960 != 1768048010618963 2026-01-10T12:26:50.760528Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:50.763416Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:50.850366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:50.909876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:50.909900Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:50.909908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:50.910020Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-01-10T12:26:51.406570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:51.626469Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:53.918052Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708394395911710:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.918131Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708394395911700:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.918275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.918683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708394395911730:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.918740Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.922219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:53.935632Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708394395911729:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:26:54.001099Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708394395911782:2534] txid# 281474976710659, issues: { message: "Check ... : schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.078157Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.165451Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:10.264514Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.340811Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.234542Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482810886148:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.234644Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.235157Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482810886158:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.235211Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.327356Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.369397Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.413164Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.493365Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.535219Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.623238Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.668637Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.730979Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.837394Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482810887031:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.837512Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.837712Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482810887036:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.837755Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482810887037:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.837787Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.842027Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:13.858127Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708482810887040:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:13.919968Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708482810887091:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:14.148756Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708465631015256:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:14.148817Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:15.947334Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708491400822013:2538], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-10T12:27:15.947734Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YmM4NzRmZmEtNmRhYjMxODYtOTIzMzczNDYtNWFlNzAxMTA=, ActorId: [5:7593708491400822005:2533], ActorState: ExecuteState, LegacyTraceId: 01keky022j056ddz4trdfrsevn, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 5 column: 30 } message: "Operation \'CreateTable\' can\'t be performed in data query" end_position { row: 5 column: 30 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-10T12:27:15.998719Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708491400822026:2541], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-10T12:27:15.999164Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YmM4NzRmZmEtNmRhYjMxODYtOTIzMzczNDYtNWFlNzAxMTA=, ActorId: [5:7593708491400822005:2533], ActorState: ExecuteState, LegacyTraceId: 01keky0244ffw7ev54nzmc2h0h, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 24 } message: "Operation \'DropTable\' can\'t be performed in data query" end_position { row: 2 column: 24 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-10T12:27:16.034690Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708495695789331:2545], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2026-01-10T12:27:16.037671Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=YmM4NzRmZmEtNmRhYjMxODYtOTIzMzczNDYtNWFlNzAxMTA=, ActorId: [5:7593708491400822005:2533], ActorState: ExecuteState, LegacyTraceId: 01keky025b2r46w4d4pa57pztr, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 54 } message: "Operation \'AlterTable\' can\'t be performed in data query" end_position { row: 2 column: 54 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::Explain [GOOD] >> KqpExplain::ExplainDataQuery >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27673, MsgBus: 9757 2026-01-10T12:26:38.507732Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708331305032743:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:38.516426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:38.809312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:38.809479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:38.812137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:38.863475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:38.925277Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:39.024905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:39.024929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:39.024943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:39.025075Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:39.161217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:39.464176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:39.527141Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:39.530044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.669941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.852800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.918685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.898485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708344189936428:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.898625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.899685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708344189936438:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.899754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.269678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.309838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.344605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.384029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.417916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.466172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.523716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.590573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.676643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708348484904604:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.676751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.677220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708348484904607:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.677304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.679240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708348484904611:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.684871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:42.703136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708348484904613:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:42.779797Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708348484904664:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:43.508883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708331305032743:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:43.508950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"], ... ) VolatileState: Connecting -> Connected 2026-01-10T12:27:09.728671Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:09.728691Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:09.728703Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:09.728767Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:09.732153Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:10.297706Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:10.326953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.399768Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.527132Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:10.576303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.647991Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.760268Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482294036435:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.760376Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.760701Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708482294036445:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.760757Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.891009Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.942822Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.989871Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.023917Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.063718Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.110980Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.183625Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.250023Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.342578Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708486589004610:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.342670Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.342754Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708486589004615:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.342932Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708486589004617:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.342986Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.347653Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:14.360192Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708486589004619:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:14.460195Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708465114165404:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:14.460290Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:14.500744Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708486589004670:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"Tables":["EightShard"],"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/EightShard","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"EightShard","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Columns":["Key"],"Name":"TableLookup","E-Cost":"0","E-Size":"0","LookupKeyColumns":["Key"],"Table":"EightShard"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> THiveTest::TestExternalBootWhenLocked [GOOD] >> THiveTest::TestExternalBootCounters >> KqpQuery::UpdateThenDelete-UseSink [GOOD] >> KqpQuery::ExecuteWriteQuery [GOOD] >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19076, MsgBus: 26683 2026-01-10T12:26:49.064375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708376444573254:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:49.064542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:49.101949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:49.332133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:49.360756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:49.360859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:49.409248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:49.459466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:49.534726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:49.534745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:49.534771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:49.534867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:49.621670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:49.963434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:49.969087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:50.010669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.079747Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:50.161536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.308293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:50.380899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.397225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708389329476775:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.397355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.397726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708389329476785:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.397776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.724139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.755922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.793088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.829447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.862472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.896758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.928939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.976028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:53.057890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393624444952:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.058006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.058078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393624444957:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.058165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393624444959:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.058202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:53.061655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:53.075155Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708393624444961:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:53.166030Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708393624445014:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:54.065044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708376444573254:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:54.065097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:55.116096Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708402214379929:2 ... cepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:08.691680Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708440344043366:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:08.691768Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8210, MsgBus: 25630 2026-01-10T12:27:11.948929Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708470946549127:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:11.958370Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:12.052156Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:12.089205Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:12.100434Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:12.100525Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:12.104582Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:12.181103Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:12.181131Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:12.181139Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:12.181233Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:12.343600Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:12.698095Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:12.717089Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:12.832470Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:12.963983Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:13.004769Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.126668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.048289Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708492421387442:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.048393Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.048838Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708492421387452:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.048898Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.136273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.177313Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.226444Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.269644Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.308945Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.356742Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.409237Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.480650Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.586808Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708492421388325:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.586921Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.587491Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708492421388330:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.587545Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708492421388331:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.587671Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:16.591985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:16.609862Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708492421388334:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:16.667379Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708492421388385:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:16.951868Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708470946549127:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:16.951948Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THiveTest::TestExternalBootCounters [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TSubDomainTest::FailIfAffectedSetNotInterior >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 18570, MsgBus: 14534 2026-01-10T12:26:15.117109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708234255083494:2135];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:15.118037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:15.331397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:15.340832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:15.340934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:15.402557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:15.421325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:15.545711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:15.545745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:15.545755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:15.545871Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:15.602213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:15.962275Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:16.124516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:18.250988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708247139986178:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:18.251004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708247139986167:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:18.251132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:18.254664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708247139986182:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:18.254768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:18.255347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:18.266144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708247139986181:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:26:18.376576Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708247139986234:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:18.583217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 12384, MsgBus: 10890 2026-01-10T12:26:19.804630Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708247824483540:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:19.804703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:19.825355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:19.899129Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:19.901476Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708247824483514:2081] 1768047979803626 != 1768047979803629 2026-01-10T12:26:19.943373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:19.943501Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:19.951632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:20.016694Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:20.016726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:20.016734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:20.016815Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:20.050727Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:20.472661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:20.479183Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:20.813450Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:23.143119Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708265004353583:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:23.143170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708265004353591:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:23.143189Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:23.143452Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708265004353598:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:23.143517Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:23.147024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:23.159991Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708265004353597:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:26:23.217739Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708265004353650:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:23.245083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:23.451484Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YmQ2NzQyYzYtZmYxM2M4MmUtM2QyNDQ4NTAtZTVkZWY2Yzc=, ActorId: [2:7593708265004353814:2352], ActorState: ExecuteState, LegacyTraceId: 01kekxyetq7vkvj20ggdh16tw1, Create QueryR ... source pool default not found or you don't have access permissions } 2026-01-10T12:26:43.569675Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.570952Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708351735561509:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.571024Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.655134Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.726179Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.769241Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.802432Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.839111Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.875218Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.916627Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.972220Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:44.063863Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708356030529676:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:44.063977Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:44.064059Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708356030529681:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:44.064326Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708356030529683:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:44.064384Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:44.068673Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:44.080331Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708356030529684:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:44.159536Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708356030529736:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:44.508760Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708334555690464:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:44.508854Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:46.243384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.588450Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:26:54.588479Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:56.401294Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-10T12:26:56.446168Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-10T12:26:57.651205Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-10T12:26:58.085208Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-10T12:27:03.651878Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxz4ve8bhe5h72bk171yas", SessionId: ydb://session/3?node_id=5&id=NWUyNDA2YjAtZGY3M2VjMTctZTk2ODFlNTQtZDBlZmM2ZWY=, Slow query, duration: 17.649856s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-10T12:27:04.036551Z node 5 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 68.1108 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037927 table: [/Root/.tmp/sessions/877eb163-4a83-2e97-1898-e29fa123c97a/Root/test_table_dd73dfaa-472b-8d3f-918c-bebaf0208e73] 2026-01-10T12:27:04.036664Z node 5 :GLOBAL WARN: log.cpp:841: 
fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-10T12:27:04.787707Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxz4ve8bhe5h72bk171yas", SessionId: ydb://session/3?node_id=5&id=NWUyNDA2YjAtZGY3M2VjMTctZTk2ODFlNTQtZDBlZmM2ZWY=, Slow query, duration: 18.785649s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-10T12:27:04.793911Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:04.799044Z --------------- Start update --------------- 2026-01-10T12:27:04.805252Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:18.187007Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxzq7r041wcamdr0kk7dan", SessionId: ydb://session/3?node_id=5&id=NzU5MjBiOGEtOGZlNDhjODktOWI4MzRhZDctYmViNzMyMzg=, Slow query, duration: 13.357785s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n UPDATE test_table SET data = \"a\"\n ", parameters: 0b |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> KqpStats::DataQueryWithEffects-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] Test command err: 2026-01-10T12:26:04.177158Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.207664Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.207943Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# 
"SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.208835Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.209164Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.210210Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:74:2076] ControllerId# 72057594037932033 2026-01-10T12:26:04.210254Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.210398Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.210521Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.220127Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.220195Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.222639Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:81:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.222820Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:82:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.222969Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:83:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.223122Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:84:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.223269Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:85:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.223417Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:86:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.223558Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:87:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.223589Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.223683Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:74:2076] 2026-01-10T12:26:04.223715Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:74:2076] 2026-01-10T12:26:04.223769Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.223828Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.224551Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.224705Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.227640Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: 
"SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.227805Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.228166Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.228470Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:26:04.229650Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:26:04.229713Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.230746Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:98:2078] ControllerId# 72057594037932033 2026-01-10T12:26:04.230784Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.230862Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.230969Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.232961Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:67:2065] 2026-01-10T12:26:04.233035Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:67:2065] 2026-01-10T12:26:04.243018Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.243076Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.244811Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.244997Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245157Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245294Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245439Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245591Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245725Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.245775Z node 1 
:BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:04.245859Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:98:2078] 2026-01-10T12:26:04.245893Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:98:2078] 2026-01-10T12:26:04.245935Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:04.245975Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:04.246866Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:04.246941Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:04.251580Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:04.251742Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:04.252145Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:04.252430Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:04.253526Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-10T12:26:04.253566Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:04.253637Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:04.253802Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:04.272178Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:04.272231Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:04.274043Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274243Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274398Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274590Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 
2026-01-10T12:26:04.274783Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.274966Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:04.275117Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targ ... 42: [cd7cbca5baaa92c4] Id# [72057594037927937:2:13:0:0:105:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:27:21.852615Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [cd7cbca5baaa92c4] restore Id# [72057594037927937:2:13:0:0:105:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:27:21.852713Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [cd7cbca5baaa92c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG33 2026-01-10T12:27:21.852800Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [cd7cbca5baaa92c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG32 2026-01-10T12:27:21.853008Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:61:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:13:0:0:105:1] FDS# 105 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:27:21.854553Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd7cbca5baaa92c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:13:0:0:105:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 28 } Cost# 80826 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 29 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-10T12:27:21.854710Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd7cbca5baaa92c4] Result# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:27:21.854825Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd7cbca5baaa92c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:27:21.855097Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:27:21.855285Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} commited cookie 1 for step 13 2026-01-10T12:27:21.855517Z node 68 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [68:483:2316] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [68:483:2316] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2026-01-10T12:27:21.856136Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:510:2342] 2026-01-10T12:27:21.856223Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup 
[68:510:2342] 2026-01-10T12:27:21.856335Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:21.856420Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-10T12:27:21.856507Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:510:2342] 2026-01-10T12:27:21.856583Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:510:2342] 2026-01-10T12:27:21.856655Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:510:2342] 2026-01-10T12:27:21.856737Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:510:2342] 2026-01-10T12:27:21.856879Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:510:2342] 2026-01-10T12:27:21.857106Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:510:2342] 2026-01-10T12:27:21.857177Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:510:2342] 2026-01-10T12:27:21.857245Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:510:2342] 2026-01-10T12:27:21.857323Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:510:2342] 2026-01-10T12:27:21.857390Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:510:2342] 2026-01-10T12:27:21.857474Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:483:2316] EventType# 268697612 2026-01-10T12:27:21.857708Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2026-01-10T12:27:21.857801Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:27:21.858225Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{22, redo 182b alter 0b annex 0, ~{ 1, 4 } -{ }, 0 gb} 2026-01-10T12:27:21.858319Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:27:21.858573Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-10T12:27:21.858651Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:27:21.858895Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{23, redo 514b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 
2026-01-10T12:27:21.858992Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:27:21.870427Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [667a4d0d3c364da9] bootstrap ActorId# [68:513:2345] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:14:0:0:321:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:27:21.870633Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [667a4d0d3c364da9] Id# [72057594037927937:2:14:0:0:321:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:27:21.870737Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [667a4d0d3c364da9] restore Id# [72057594037927937:2:14:0:0:321:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:27:21.870840Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [667a4d0d3c364da9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG33 2026-01-10T12:27:21.870925Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [667a4d0d3c364da9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG32 2026-01-10T12:27:21.871122Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:61:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:14:0:0:321:1] FDS# 321 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:27:21.872635Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [667a4d0d3c364da9] received {EvVPutResult Status# OK ID# [72057594037927937:2:14:0:0:321:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 29 } Cost# 82527 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 30 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-10T12:27:21.872812Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [667a4d0d3c364da9] Result# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:27:21.872926Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [667a4d0d3c364da9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:27:21.873224Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:27:21.873391Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} commited cookie 1 for step 14 2026-01-10T12:27:21.874015Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:515:2347] 2026-01-10T12:27:21.874085Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:515:2347] 2026-01-10T12:27:21.874207Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:21.874288Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 
selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-10T12:27:21.874386Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:515:2347] 2026-01-10T12:27:21.874467Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:515:2347] 2026-01-10T12:27:21.874536Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:515:2347] 2026-01-10T12:27:21.874614Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:515:2347] 2026-01-10T12:27:21.874747Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:515:2347] 2026-01-10T12:27:21.874958Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:515:2347] 2026-01-10T12:27:21.875036Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:515:2347] 2026-01-10T12:27:21.875094Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:515:2347] 2026-01-10T12:27:21.875172Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:515:2347] 2026-01-10T12:27:21.875228Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:515:2347] 2026-01-10T12:27:21.875314Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:514:2346] EventType# 268830214 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> KqpQuery::Now [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpParams::Decimal+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8593, MsgBus: 25050 2026-01-10T12:26:30.517980Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708296619656859:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:30.523588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:30.828124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:30.914988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:30.915091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:30.918524Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708296619656813:2081] 
1768047990514736 != 1768047990514739 2026-01-10T12:26:30.927516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:30.931265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:31.035676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:31.035695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:31.035701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:31.035774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:31.114133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:31.476641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:31.486413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:31.544745Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:31.562826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.688359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.842608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:31.908402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:33.812400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708309504560571:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.812513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.812851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708309504560581:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:33.812906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.194939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.228871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.262259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.292229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.322252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.357492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.392584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.443145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.516700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708313799528749:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.516766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.517029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708313799528755:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.517065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708313799528754:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.517159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.520982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:34.537393Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708313799528758:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:34.629697Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708313799528811:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:35.518350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708296619656859:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:35.518458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:36.274532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ons } 2026-01-10T12:27:15.110445Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.212215Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708470063652216:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:15.212314Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:15.235299Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.290638Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.329575Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.367159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.410978Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.452209Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.504843Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.587935Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.714546Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708491538491418:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.714668Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.715016Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708491538491424:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.715354Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708491538491423:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.715407Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.720032Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:15.734155Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708491538491427:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:15.820513Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708491538491478:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:18.439847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.499284Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.556394Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":18,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":17,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_1_2","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_2"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":15,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Delete","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_1","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_1_1"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Upsert","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","ReadRangesPointPrefixLen":"1","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","ReadRangesKeys":["Fk"],"Table":"SecondaryKeys\/Index\/indexImplTable","ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Subplan Name":"CTE Stage_5","Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1_0","Parent 
Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_1","Node Type":"Precompute_1_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_2","Node Type":"Precompute_1_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"}]},{"name":"\/Root\/SecondaryKeys\/Index\/indexImplTable","reads":[{"columns":["Fk","Key"],"scan_by":["Fk [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":7,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Operators":[{"Name":"Delete","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":14,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Name":"Upsert","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":22,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16039, MsgBus: 8394 2026-01-10T12:26:48.023986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708376073924893:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:48.024052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:48.318346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:26:48.318472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:48.321127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:48.321921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:48.324235Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708376073924865:2081] 1768048008022525 != 1768048008022528 2026-01-10T12:26:48.339575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:48.440815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:48.440836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:48.440842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:48.440935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:48.476792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:48.926321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:48.940736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:48.999254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.074614Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:49.153059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.359834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.450122Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.518479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708388958828626:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.518585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.518969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708388958828636:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.519028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.845692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.888006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.927433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.963652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.997976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.048219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.094618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.137049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.219562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393253796802:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.219653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.219923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393253796807:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.219984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708393253796808:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.220015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.224392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:52.237366Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708393253796811:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:52.335946Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708393253796862:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:53.024139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708376073924893:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:53.024188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:56.774371Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768048014635, ... main>:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 16092, MsgBus: 27018 2026-01-10T12:27:15.801898Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708490285392308:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:15.801945Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:15.829305Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:15.929753Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:15.929886Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:15.947094Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708490285392282:2081] 1768048035797441 != 1768048035797444 2026-01-10T12:27:15.967307Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:15.974875Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:16.046727Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:16.092746Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:16.092774Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:16.092782Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:16.092872Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:16.653931Z node 4 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:16.661919Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:16.674696Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.774359Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.965948Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:17.044819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.152108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.968293Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708507465263341:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.968394Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.968709Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708507465263351:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.968743Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.047895Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.090873Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.136282Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.180252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.216608Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.258032Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.314664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.380949Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.486352Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708511760231518:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.486472Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.486540Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708511760231523:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.487012Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708511760231525:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.487066Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.490283Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:20.505706Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708511760231526:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:20.581058Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708511760231578:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:20.807145Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708490285392308:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:20.807279Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> TSubDomainTest::CreateTablet >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SelfJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20539, MsgBus: 16283 2026-01-10T12:26:38.182804Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708332501699006:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:38.182860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:38.431864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:38.435927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:38.436050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:38.518584Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708332501698981:2081] 1768047998181441 != 1768047998181444 2026-01-10T12:26:38.524637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:38.526748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:38.634039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:38.637640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:38.637655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:38.637659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:38.637709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:26:39.045857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:39.092977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.203431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:39.214555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.374699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.452196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.498505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708345386602742:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.498631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.499037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708345386602752:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.499121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:41.870413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.910074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.953895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.981922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.012367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.064843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.106677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.150936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.226323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708349681570919:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.226394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.226587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708349681570924:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.226660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708349681570925:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.226728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.230609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:42.245044Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708349681570928:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:42.315613Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708349681570979:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:43.183788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708332501699006:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:43.183864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22014, MsgBus: 9064 2026-01-10T12:26:45.044510Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708361323871926:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:45.044564Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... ble.cpp:689) 2026-01-10T12:27:19.001402Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708484607587072:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:19.001474Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:19.009517Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.051683Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.086501Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.125873Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.186321Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:27:19.247036Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.338994Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708506082426162:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.339097Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.339575Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708506082426167:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.339631Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708506082426168:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.339669Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.345699Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:19.359418Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708506082426171:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:19.426491Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708506082426222:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:21.700841Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:23.006240Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708518967328633:2585], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2026-01-10T12:27:23.008517Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=N2YxNTA2NDgtMjc2OTcyYjktNjY1ZjVmYWQtMzY2YzQyY2E=, ActorId: [5:7593708518967328631:2584], ActorState: ExecuteState, LegacyTraceId: 01keky08z98vb4fpb6z86ydp0p, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 17 } message: "At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda" end_position { row: 4 column: 17 } severity: 1 issues { position { row: 3 column: 25 } message: "At function: Parameter, At function: DataType" end_position { row: 3 column: 25 } severity: 1 issues { position { row: 3 column: 25 } message: "Invalid decimal precision: 99" end_position { row: 3 column: 25 } severity: 1 } } } } tx_id# trace_id# 2026-01-10T12:27:23.143881Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=YjhjYTAzZTgtYWQ0OWZmOTEtMzA0Mzk2MTEtY2M1ODFkNjc=, ActorId: [5:7593708523262295937:2589], ActorState: ExecuteState, LegacyTraceId: 01keky090gepmwas78237k88j3, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1555: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 status# BAD_REQUEST issues# trace_id# 2026-01-10T12:27:23.190217Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708523262295952:2595], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-10T12:27:23.192634Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=Mjk1YzQ5ZDctNDMzZTFiNWItOGIxZTAwM2ItYmEyNWI3MTc=, ActorId: [5:7593708523262295950:2594], ActorState: ExecuteState, LegacyTraceId: 01keky094t6p5p1ez0w4g79pt0, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 7 column: 29 } message: "At function: KiWriteTable!" end_position { row: 7 column: 29 } severity: 1 issues { position { row: 7 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 7 column: 50 } severity: 1 issues { position { row: 4 column: 25 } message: "Implicit decimal cast would lose precision" end_position { row: 4 column: 25 } severity: 1 } issues { position { row: 7 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 7 column: 50 } severity: 1 } } issues { position { row: 7 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 7 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:27:23.236409Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7593708523262295963:2600], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-10T12:27:23.240174Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NTllMDFjNzYtODljY2JkOGItNDA4ZTdkZmUtNTBhNGY5ZjU=, ActorId: [5:7593708523262295961:2599], ActorState: ExecuteState, LegacyTraceId: 01keky096d4swfhpy518f3mknd, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 3 column: 50 } severity: 1 issues { position { column: 14 } message: "Implicit decimal cast would lose precision" end_position { column: 14 } severity: 1 } issues { position { row: 3 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 3 column: 50 } severity: 1 } } issues { position { row: 3 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] Test command err: Trying to start YDB, gRPC: 21197, MsgBus: 29025 2026-01-10T12:26:39.074312Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708335972140001:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:39.074369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:39.314299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:39.318043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:39.318155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:39.321323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:39.444023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:39.445536Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708335972139973:2081] 1768047999072850 != 1768047999072853 2026-01-10T12:26:39.456137Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:39.457181Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:26:39.548787Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:39.548818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:39.548827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:39.548935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:39.574971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:40.082561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:40.111078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:40.189175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.349064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.515236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.583900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.559891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708348857043732:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.560002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.560381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708348857043741:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.560448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:42.857909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.900030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.934645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.968464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:42.999989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.042867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.076433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.135877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.227639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708353152011914:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.227752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.228043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708353152011920:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.228101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708353152011919:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.228151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.232458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:43.245302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708353152011923:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:43.304198Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708353152011974:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:44.074582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708335972140001:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:44.074646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:46.691052Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593708366 ... VE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:12.819456Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:12.825019Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:12.884786Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:12.884811Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:12.884821Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:12.884918Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:12.919094Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:13.510862Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:13.521705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:27:13.534944Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.616544Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.754826Z node 4 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:13.818721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.918590Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.249672Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708497318512891:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.249780Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.250294Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708497318512901:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.250351Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.337463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.390152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.427682Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.476098Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.539087Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.600055Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.648726Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.652996Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708475843674570:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:17.653065Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:17.715699Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.841215Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708497318513776:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.841363Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.841669Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708497318513781:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.841730Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708497318513782:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.841778Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.846825Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:17.863011Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708497318513785:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:17.922997Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708497318513836:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:24.516709Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7593708527383285599:2679], status: GENERIC_ERROR, issues:
:1:0: Error: mismatched input 'sub' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES} 2026-01-10T12:27:24.517369Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=NzlkNTRiMDEtNTBmNTVhNDEtZWFjMGM4N2EtZmE2MDAxNWU=, ActorId: [4:7593708527383285597:2678], ActorState: ExecuteState, LegacyTraceId: 01keky0aej17r44whc399r0k21, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 } message: "mismatched input \'sub\' expecting {\';\', \'(\', \'$\', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES}" end_position { row: 1 } severity: 1 } tx_id# trace_id# |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> KqpStats::CreateTableAsStats-IsOlap [GOOD] >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectView >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TPopulatorTest::Boot >> TPopulatorTest::MakeDir >> TPopulatorQuorumTest::TwoRingGroups >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> TPopulatorQuorumTest::OneRingGroup >> KqpQuery::DictJoin [GOOD] >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] >> TPopulatorQuorumTest::TwoRingGroups [GOOD] >> TPopulatorQuorumTest::OneRingGroup [GOOD] >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-10T12:27:30.869763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... 
waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-10T12:27:30.879981Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-10T12:27:30.880104Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-01-10T12:27:30.883143Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-10T12:27:30.883212Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-10T12:27:30.883274Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-10T12:27:30.883358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-10T12:27:30.883382Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-10T12:27:30.883403Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-10T12:27:30.883437Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-10T12:27:30.883465Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-10T12:27:30.883486Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-10T12:27:30.883566Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:27:30.883646Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.883858Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-10T12:27:30.883920Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.884006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:27:30.884123Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 
100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-10T12:27:30.884215Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.884320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-10T12:27:30.884364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.884464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:27:30.884537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-10T12:27:30.884601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.884666Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-10T12:27:30.884709Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.884782Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:27:30.884833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.884893Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-10T12:27:30.884955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-10T12:27:30.885009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-10T12:27:30.885040Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.885135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:27:30.885199Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.885248Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-10T12:27:30.885272Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-10T12:27:30.885300Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.885363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:27:30.885399Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.885462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-10T12:27:30.885539Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 2026-01-10T12:27:30.885567Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2026-01-10T12:27:30.885735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-10T12:27:30.896299Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-10T12:27:30.896363Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-10T12:27:30.869696Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-10T12:27:30.878560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2026-01-10T12:27:30.878684Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-10T12:27:30.881793Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-10T12:27:30.881880Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2026-01-10T12:27:30.881935Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2026-01-10T12:27:30.882012Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-10T12:27:30.882057Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2026-01-10T12:27:30.882085Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2026-01-10T12:27:30.882129Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-10T12:27:30.882150Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2026-01-10T12:27:30.882169Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2026-01-10T12:27:30.882247Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-10T12:27:30.882348Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-10T12:27:30.882527Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2026-01-10T12:27:30.882607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-10T12:27:30.882702Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-10T12:27:30.882777Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-10T12:27:30.882877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-10T12:27:30.882984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 
2026-01-10T12:27:30.883031Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-10T12:27:30.883091Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-10T12:27:30.883148Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-10T12:27:30.883206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-10T12:27:30.883281Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2026-01-10T12:27:30.883327Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-10T12:27:30.883427Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-10T12:27:30.883506Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-10T12:27:30.883592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-10T12:27:30.883645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-10T12:27:30.883696Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2026-01-10T12:27:30.883727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.883811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-10T12:27:30.883859Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-10T12:27:30.883923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2026-01-10T12:27:30.883946Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 2026-01-10T12:27:30.883973Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.884037Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-10T12:27:30.884404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-10T12:27:30.884481Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2026-01-10T12:27:30.884553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 2026-01-10T12:27:30.884584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2026-01-10T12:27:30.884763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2026-01-10T12:27:30.896041Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 12345 2026-01-10T12:27:30.896165Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-10T12:27:30.870032Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-10T12:27:30.884169Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-10T12:27:30.884263Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-10T12:27:30.886978Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-10T12:27:30.887050Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-10T12:27:30.887105Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-01-10T12:27:30.887188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-10T12:27:30.887211Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-10T12:27:30.887230Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-10T12:27:30.887282Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-10T12:27:30.887322Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-10T12:27:30.887347Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-10T12:27:30.887422Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-10T12:27:30.887479Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-10T12:27:30.887506Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-10T12:27:30.887539Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-10T12:27:30.887583Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-10T12:27:30.887605Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-10T12:27:30.887623Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-10T12:27:30.887667Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-10T12:27:30.887697Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-10T12:27:30.887716Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-10T12:27:30.887785Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-10T12:27:30.887877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.888045Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-10T12:27:30.888167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.888250Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.888354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-10T12:27:30.888420Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-10T12:27:30.888521Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.888600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-10T12:27:30.888664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-10T12:27:30.888723Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.888794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-10T12:27:30.888858Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.888942Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:27:30.889000Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.889055Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-10T12:27:30.889115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-10T12:27:30.889175Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.889238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-10T12:27:30.889289Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:27:30.889337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.889426Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-10T12:27:30.889479Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:27:30.889519Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.889575Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-10T12:27:30.889695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-10T12:27:30.889777Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-10T12:27:30.889848Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-10T12:27:30.889957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-10T12:27:30.890009Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-10T12:27:30.890069Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.890142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2026-01-10T12:27:30.890202Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-10T12:27:30.890252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-10T12:27:30.890287Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-10T12:27:30.890328Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.890436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2026-01-10T12:27:30.890486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-10T12:27:30.890516Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.890586Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-10T12:27:30.890638Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.890688Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-10T12:27:30.890731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.890768Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] 2026-01-10T12:27:30.890811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-10T12:27:30.890841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-01-10T12:27:30.890892Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.890961Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-10T12:27:30.891003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.891081Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-10T12:27:30.891128Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.891176Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-10T12:27:30.891210Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-10T12:27:30.891249Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-10T12:27:30.891294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-10T12:27:30.891335Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-10T12:27:30.891383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-10T12:27:30.891468Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-10T12:27:30.891500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-10T12:27:30.891708Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2026-01-10T12:27:30.891754Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2026-01-10T12:27:30.891792Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-10T12:27:30.904560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-10T12:27:30.904651Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 65275, MsgBus: 19570 2026-01-10T12:26:54.793325Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708400016159768:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:54.794020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:55.066603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:55.069808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:55.069923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:55.106541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:55.175593Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708400016159624:2081] 1768048014764773 != 1768048014764776 2026-01-10T12:26:55.178181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:55.271321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:55.317808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:55.317835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:55.317848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:55.317967Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:55.746928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:55.753002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:55.793677Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:55.819553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:56.007611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:56.179558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:56.256831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.290145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708417196030684:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.290260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.292048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708417196030694:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.292133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.641561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.684157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.719322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.750224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.783116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.839871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.927972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.987292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.093133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708421490998867:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:59.093203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:59.093480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708421490998872:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:59.093608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708421490998873:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:59.093640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:59.097957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:59.111906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708421490998876:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:59.210826Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708421490998927:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:59.789480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708400016159768:2169];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:59.789546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12815, MsgBus: 15253 2026-01-10T12:27:02.174628Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsAct ... nt":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":8,"Max":8,"Min":8,"History":[7,8]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1495,"Max":1495,"Min":1495,"History":[7,1495]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"UpdateTimeMs":6,"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":11626,"CpuTimeUs":6855},"ProcessCpuTimeUs":1029,"TotalDurationUs":355371,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2026-01-10T12:27:28.092207Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:28.105853Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) query_phases { duration_us: 9952 table_access { name: "/Root/.tmp/sessions/b5ca8de2-4fa8-1b9d-3555-1187dc5db08e/Root/Destination_d45648d0-442e-c4db-3b74-4abb4b86748b" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 1362 affected_shards: 1 } compilation { duration_us: 11626 cpu_time_us: 6855 } process_cpu_time_us: 1029 query_plan: 
"{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, +\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}},{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[4,8]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1495,\"Max\":1495,\"Min\":1495,\"History\":[4,1495]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":3,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[4,1048576]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":43,\"Max\":43,\"Min\":43,\"History\":[3,43]},\"Tasks\":1,\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"BaseTimeMs\":1768048047999,\"CpuTimeUs\":{\"Count\":1,\"Sum\":802,\"Max\":802,\"Min\":802,\"History\":[3,802]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[4,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[4,64]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":2192,\"Max\":2192,\"Min\":2192,\"History\":[4,2192]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":3}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[7,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[7,28]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1359,\"Max\":1359,\"Min\":1359,\"History\":[7,1359]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":3,\"Min\":2}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/b5ca8de2-4fa8-1b9d-3555-1187dc5db08e\\/Root\\/Destination_d45648d0-442e-c4db-3b74-4abb4b86748b\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":3000,\"Max\":3000,\"Min\":3000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[7,1048576]},\"BaseTimeMs\":1768048047999,\"CpuTimeUs\":{\"Count\":1,\"Sum\":560,\"Max\":560,\"Min\":560,\"History\":[7,560]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":3000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[7,8]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[7,8]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1495,\"Max\":1495,\"Min\":1495,\"History\":[7,1495]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":3,\"Min\":2}}}],\"UpdateTimeMs\":6,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":11626,\"CpuTimeUs\":6855},\"ProcessCpuTimeUs\":1029,\"TotalDurationUs\":355371,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node 
Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Source\" \'\"72057594046644480:42\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") \'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"f8b4389b-d7a7606b-427df311-52386a87\") \'(\'\"_partition_mode\" \'\"single\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/b5ca8de2-4fa8-1b9d-3555-1187dc5db08e/Root/Destination_d45648d0-442e-c4db-3b74-4abb4b86748b\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"8be7c62e-b9bad8a1-1515c1ca-8d25cad8\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 355371 total_cpu_time_us: 9246 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/b5ca8de2-4fa8-1b9d-3555-1187dc5db08e/Root/Destination_d45648d0-442e-c4db-3b74-4abb4b86748b\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":47},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1768048048\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"e45f673c-4d00713-633a34f8-62d29556\",\"version\":\"1.0\"}" |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18999, MsgBus: 15790 2026-01-10T12:26:58.441809Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708416896763846:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:58.442812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:58.727177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:58.820179Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708416896763794:2081] 1768048018412137 != 1768048018412140 2026-01-10T12:26:58.826777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:58.832919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:58.833010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:58.835611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:58.900207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:58.987622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:58.987648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:58.987655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:58.987762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:59.448720Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:59.466430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:01.830293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708429781666579:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.830433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708429781666553:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.830532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.832758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708429781666585:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.832827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.834490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:01.847614Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708429781666584:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:27:01.966311Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708429781666637:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:02.282417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 65119, MsgBus: 1879 2026-01-10T12:27:03.401524Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708436904060855:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:03.401956Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:03.430233Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:03.566473Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:03.569896Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708436904060742:2081] 1768048023391841 != 1768048023391844 2026-01-10T12:27:03.580223Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:03.583093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:03.583183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:03.606544Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:03.688615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:03.688640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:03.688647Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:03.688719Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:03.877903Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:04.236814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:04.401518Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:07.155080Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708454083930815:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.155167Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.155334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708454083930827:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.155375Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708454083930829:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.155412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.159282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:07.175329Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708454083930831:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:27:07.257476Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708454083930882:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:07.294648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 11986, MsgBus: 25474 2026-0 ... ] ActorId: [4:7593708503511463807:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:18.477420Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708503511463859:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62685, MsgBus: 9775 2026-01-10T12:27:21.761280Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708515750128707:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:21.761446Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:21.782331Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:21.895148Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:21.897698Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:21.909888Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:21.940059Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:21.985133Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:22.008709Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:22.008737Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:22.008744Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:22.008831Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:22.535003Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:22.550162Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.665262Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.768271Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:22.826302Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.996294Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.081879Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708537224967010:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.081992Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.082414Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708537224967020:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.082466Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.158360Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.217747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.270975Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.320021Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.358466Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.416967Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.467236Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.544561Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.667377Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708537224967891:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.667455Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.667669Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708537224967896:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.667710Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708537224967897:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.667746Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.671936Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:26.685949Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708537224967900:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:26.752403Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708537224967951:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:26.761715Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708515750128707:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.761782Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2026-01-10T12:27:31.096162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:31.096236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: 
minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 
FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
rSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:15:2062], cookie# 100 2026-01-10T12:27:31.552333Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:18:2065], cookie# 100 2026-01-10T12:27:31.552505Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2126], cookie# 100 2026-01-10T12:27:31.552541Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2026-01-10T12:27:31.552933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2127], cookie# 100 2026-01-10T12:27:31.553258Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:97:2125], cookie# 100 2026-01-10T12:27:31.553427Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:98:2126], cookie# 100 2026-01-10T12:27:31.553522Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000037 2026-01-10T12:27:31.553757Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:99:2127], cookie# 100 2026-01-10T12:27:31.553792Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 2026-01-10T12:27:31.555750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 
PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 555, preserialized size# 119 2026-01-10T12:27:31.555820Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 7 2026-01-10T12:27:31.555946Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-10T12:27:31.555988Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-10T12:27:31.556051Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 FAKE_COORDINATOR: Erasing txId 100 2026-01-10T12:27:31.556864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 
100, event size# 314, preserialized size# 2 2026-01-10T12:27:31.556941Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 100, is deletion# false, version: 3 2026-01-10T12:27:31.557058Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:12:2059], cookie# 100 2026-01-10T12:27:31.557104Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:15:2062], cookie# 100 2026-01-10T12:27:31.557138Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:18:2065], cookie# 100 2026-01-10T12:27:31.557234Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:97:2125], cookie# 100 2026-01-10T12:27:31.557270Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-10T12:27:31.557303Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-10T12:27:31.557369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-10T12:27:31.557435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:98:2126], cookie# 100 2026-01-10T12:27:31.557467Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 7 2026-01-10T12:27:31.557882Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:99:2127], cookie# 100 2026-01-10T12:27:31.557927Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 100 2026-01-10T12:27:31.557989Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 100 2026-01-10T12:27:31.558041Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 
72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 100 2026-01-10T12:27:31.558127Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 100 2026-01-10T12:27:31.558361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 100 2026-01-10T12:27:31.558395Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-10T12:27:31.558450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 100 2026-01-10T12:27:31.558475Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2026-01-10T12:27:31.106652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:31.106731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 5956, MsgBus: 14931 2026-01-10T12:26:59.593961Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708421355881469:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:59.594040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:59.649512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:59.937812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:59.937962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:00.008959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:00.015692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:00.020203Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708421355881301:2081] 1768048019572822 != 1768048019572825 2026-01-10T12:27:00.031454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:00.218564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:00.218589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:00.218601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:00.218671Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:00.269605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:00.604272Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:00.792385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:00.800924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:00.884119Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.052907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.230736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.320858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.337808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708438535752362:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.337988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.338384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708438535752372:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.338442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:03.703091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.742375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.778325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.814108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.856869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.922183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:03.962631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.011150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.106159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708442830720540:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.106260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.106394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708442830720545:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.106435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708442830720546:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.106472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.111335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:04.127490Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708442830720549:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:04.207932Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708442830720600:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:04.593148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708421355881469:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:04.593227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 21.989783Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708516767419758:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:21.989856Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:22.002939Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:22.128223Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:22.131077Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:22.131158Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:22.144220Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708516767419562:2081] 1768048041945117 != 1768048041945120 2026-01-10T12:27:22.167402Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:22.182623Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:22.288687Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:22.288718Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:22.288728Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:22.288843Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:22.363379Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:22.836522Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:22.846872Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:22.856027Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.929844Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:23.018921Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:23.155379Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:23.231545Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.213565Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708538242257915:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.213674Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.213995Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708538242257925:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.214053Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.319733Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.366042Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.413130Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.457289Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.509833Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.567152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.632776Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.701400Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.807398Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708538242258795:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.807473Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.807698Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708538242258801:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.807717Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708538242258800:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.807769Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.812676Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:26.828109Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708538242258804:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:26.895943Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708538242258855:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:26.989032Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708516767419758:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.989113Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2026-01-10T12:25:59.265751Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.297347Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.299884Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.308343Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.309586Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.310713Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:74:2076] ControllerId# 72057594037932033 2026-01-10T12:25:59.310755Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.312226Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.312376Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.322177Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.322236Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.325254Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:81:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325377Z node 3 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:82:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325455Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:83:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325543Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:84:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325620Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:85:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325694Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:86:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325761Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:73:2075] Create Queue# [3:87:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.325779Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.326415Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:74:2076] 2026-01-10T12:25:59.326458Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:74:2076] 2026-01-10T12:25:59.326502Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.326554Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.328631Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.328743Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.331420Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.331592Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.331911Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.332199Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:25:59.333145Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:25:59.333186Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.334044Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:98:2078] ControllerId# 
72057594037932033 2026-01-10T12:25:59.334083Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.334144Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.334252Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.335913Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:67:2065] 2026-01-10T12:25:59.335951Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:67:2065] 2026-01-10T12:25:59.346295Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.346348Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.348013Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348196Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348327Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348458Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348583Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348713Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348855Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:97:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.348893Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.348954Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:98:2078] 2026-01-10T12:25:59.348983Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:98:2078] 2026-01-10T12:25:59.349021Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.349055Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.349863Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.349934Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.353089Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 
VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.353223Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.353563Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.353776Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.354687Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-10T12:25:59.354718Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.354775Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.354887Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.365407Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.365463Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.367252Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.367425Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.367570Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.367745Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.367908Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.368050Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.368204Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targ ... 
BUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1974:2267] 2026-01-10T12:27:30.207466Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037892] forward result remote node 67 [62:2103:2492] 2026-01-10T12:27:30.207610Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [62:2103:2492] 2026-01-10T12:27:30.207656Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037892]::SendEvent [62:2103:2492] 2026-01-10T12:27:30.208040Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [62:2103:2492] 2026-01-10T12:27:30.208659Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [62:2103:2492] 2026-01-10T12:27:30.208708Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [62:2103:2492] 2026-01-10T12:27:30.210036Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [62:2107:2494] 2026-01-10T12:27:30.210078Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [62:2107:2494] 2026-01-10T12:27:30.210142Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [67:1314:2100] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:30.210184Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1314:2100] 2026-01-10T12:27:30.210259Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 67 [62:2107:2494] 2026-01-10T12:27:30.210392Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [62:2107:2494] 2026-01-10T12:27:30.210436Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037893]::SendEvent [62:2107:2494] 2026-01-10T12:27:30.210632Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [62:2107:2494] 2026-01-10T12:27:30.211000Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [62:2107:2494] 2026-01-10T12:27:30.211038Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [62:2107:2494] 2026-01-10T12:27:30.212422Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [62:2110:2496] 2026-01-10T12:27:30.212471Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [62:2110:2496] 2026-01-10T12:27:30.212549Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [66:1320:2143] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:30.212589Z node 62 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-10T12:27:30.212804Z node 62 
:STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:27:30.212978Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2026-01-10T12:27:30.213035Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2026-01-10T12:27:30.213133Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2026-01-10T12:27:30.213194Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1976:2268] CurrentLeaderTablet: [67:1981:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-01-10T12:27:30.213328Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1976:2268] CurrentLeaderTablet: [67:1981:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-01-10T12:27:30.213397Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037894 leader: [67:1976:2268] followers: 0 2026-01-10T12:27:30.213451Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1976:2268] 2026-01-10T12:27:30.213543Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 67 [62:2110:2496] 2026-01-10T12:27:30.213703Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [62:2110:2496] 2026-01-10T12:27:30.213748Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037894]::SendEvent [62:2110:2496] 2026-01-10T12:27:30.214101Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [62:2110:2496] 2026-01-10T12:27:30.214592Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [62:2110:2496] 2026-01-10T12:27:30.214634Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [62:2110:2496] 2026-01-10T12:27:30.215881Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [62:2114:2498] 2026-01-10T12:27:30.215922Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [62:2114:2498] 2026-01-10T12:27:30.215987Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [67:1822:2195] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:30.216026Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1822:2195] 2026-01-10T12:27:30.217816Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote 
node 67 [62:2114:2498] 2026-01-10T12:27:30.217961Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [62:2114:2498] 2026-01-10T12:27:30.218007Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037895]::SendEvent [62:2114:2498] 2026-01-10T12:27:30.218273Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [62:2114:2498] 2026-01-10T12:27:30.218695Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [62:2114:2498] 2026-01-10T12:27:30.218737Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [62:2114:2498] 2026-01-10T12:27:30.220096Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [62:2117:2500] 2026-01-10T12:27:30.220142Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [62:2117:2500] 2026-01-10T12:27:30.220204Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [67:1825:2197] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:30.220245Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1825:2197] 2026-01-10T12:27:30.220330Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 67 [62:2117:2500] 2026-01-10T12:27:30.220417Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [62:2117:2500] 2026-01-10T12:27:30.220459Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037896]::SendEvent [62:2117:2500] 2026-01-10T12:27:30.220719Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [62:2117:2500] 2026-01-10T12:27:30.221081Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [62:2117:2500] 2026-01-10T12:27:30.221123Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [62:2117:2500] 2026-01-10T12:27:30.222513Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [62:2119:2501] 2026-01-10T12:27:30.222596Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [62:2119:2501] 2026-01-10T12:27:30.222716Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [62:618:2179] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:27:30.222805Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [62:618:2179] 2026-01-10T12:27:30.222930Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [62:2119:2501] 2026-01-10T12:27:30.223034Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [62:2119:2501] 2026-01-10T12:27:30.223124Z node 
62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [62:2119:2501] 2026-01-10T12:27:30.223213Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [62:2119:2501] 2026-01-10T12:27:30.223367Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [62:2119:2501] 2026-01-10T12:27:30.223677Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [62:2119:2501] 2026-01-10T12:27:30.223752Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [62:2119:2501] 2026-01-10T12:27:30.223809Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [62:2119:2501] 2026-01-10T12:27:30.223897Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [62:2119:2501] 2026-01-10T12:27:30.223969Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [62:2119:2501] 2026-01-10T12:27:30.224060Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [62:587:2174] EventType# 268697616 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |95.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::QueryExplain [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> TKeyValueTracingTest::WriteSmall >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::ReadHuge >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> KqpStats::SelfJoin [GOOD] >> KqpParams::InvalidJson [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: Trying to start YDB, gRPC: 24203, MsgBus: 9307 2026-01-10T12:26:48.326673Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708374394462124:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:48.344412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:48.601405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:48.626864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:48.626959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:48.661305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:48.735247Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:48.740175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708374394462082:2081] 1768048008316378 != 1768048008316381 2026-01-10T12:26:48.768059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:48.792670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:48.792696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:48.792708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:48.792795Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:49.264667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:49.275771Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:26:49.332064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.355054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:49.506959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.668527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.742999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.790287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708387279365833:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.790426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.790906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708387279365843:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.790952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.125747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.162283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.192402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.235835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.262708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.299320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.350927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.395816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.473459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708391574334009:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.473580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.474011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708391574334014:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.474062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708391574334015:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.474179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.478274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:52.493144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708391574334018:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:52.563164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708391574334069:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:53.325812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708374394462124:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:53.325882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:54.142481Z node 1 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d0c692e0b80] received request Name# Ydb.Table.V1.TableService/ExecuteDataQuery ok# true data# s ... ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:25.456574Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:25.464207Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:25.673363Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:25.773277Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.263507Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708550938200253:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.263629Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.264001Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708550938200263:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.264063Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.362460Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.414583Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.427821Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708529463361946:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:29.427898Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:29.483166Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.523531Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.575865Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.621587Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.680984Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.749063Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.878507Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708550938201135:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.878615Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.878955Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708550938201140:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.879006Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708550938201141:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.879127Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:29.883333Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:29.896178Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708550938201144:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:29.960230Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708550938201195:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:45" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 '('('"ItemsLimit" $3) '('"Sequential" '1) '('"PointPrefixLen" '1))) (let $5 (Uint32 '1)) (let $6 (KqpRowsSourceSettings $1 $2 $4 '((KqlKeyExc $5 (String '"Name")) (KqlKeyInc $5)))) (let $7 (OptionalType (DataType 'String))) (let $8 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $7) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $7))) (let $9 '('('"_logical_id" '715) '('"_id" '"35a4e043-65c6f20d-33df553a-4c58cff2") '('"_partition_mode" '"single") '('"_wide_channels" $8))) (let $10 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $6)) (lambda '($14) (block '( (let $15 (lambda '($16) (Member $16 '"Amount") (Member $16 '"Comment") (Member $16 '"Group") (Member $16 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $14) $3) $15))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '"0"))) (let $12 (DqPhyStage '($11) (lambda '($17) (FromFlow (NarrowMap (Take (ToFlow $17) $3) (lambda '($18 $19 $20 $21) (AsStruct '('"Amount" $18) '('"Comment" $19) '('"Group" $20) '('"Name" $21)))))) '('('"_logical_id" '728) '('"_id" '"f739a9bd-52ff49e7-3947e911-836bf012")))) (let $13 (DqCnResult (TDqOutput $12 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($10 $12) '($13) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $8) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::WriteHuge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7517, MsgBus: 19419 2026-01-10T12:26:39.580191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708333902730928:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:39.580343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:39.880169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:39.986840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:40.000946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:40.001050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:40.004360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:40.133084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:40.133116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:40.133130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:40.133218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:40.164712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:40.585006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:40.589178Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:40.635086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.776973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:40.951761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:41.048655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.013253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708351082601863:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.013424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.016556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708351082601873:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.016660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.298144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.332464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.371031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.406359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.440233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.476399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.514741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.574216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:43.664371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708351082602744:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.664455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.668585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708351082602749:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.668650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708351082602750:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.668696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:43.672881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:43.686692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708351082602753:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:43.760337Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708351082602804:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:44.579244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708333902730928:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:44.579314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:45.461479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:45.501682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... pId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.641918Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.846041Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:22.999992Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.143146Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708536887170118:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.143241Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.143539Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708536887170128:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.143589Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.242994Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.301102Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.353055Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.420598Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.423510Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708515412331828:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.423806Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:26.475619Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.542471Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.596435Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.670073Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:26.769653Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708536887171006:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.769756Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.769902Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708536887171011:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.769938Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708536887171012:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.769991Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:26.775471Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:26.792702Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708536887171015:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:26.875311Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708536887171066:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:29.707279Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.767248Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.841946Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> TKeyValueTracingTest::WriteSmall [GOOD] >> TKeyValueTracingTest::ReadSmall [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 29318, MsgBus: 20023 2026-01-10T12:27:03.184303Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708438033549679:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:03.187538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:03.432493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:03.462285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:03.462409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:03.499654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:03.504648Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:03.506099Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708438033549538:2081] 1768048023138797 != 1768048023138800 2026-01-10T12:27:03.643115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:03.643140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:03.643152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:03.643275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:03.717972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:04.189432Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:04.204449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:04.216396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:27:04.279668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.440283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.607207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.700384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:06.869351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708450918453291:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:06.869539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:06.870111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708450918453301:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:06.870181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.274384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.307636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.341620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.380232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.435787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.485778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.567578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.625174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.713601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708455213421474:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.713685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.713985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708455213421479:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.713992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708455213421480:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.714030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.718074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:07.730101Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708455213421483:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:07.826133Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708455213421536:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:08.183447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708438033549679:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:08.183556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators": ... Actor] ActorId: [4:7593708557649710046:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.117142Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.122435Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:31.140164Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708557649710050:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:31.236608Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708557649710101:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:31.301067Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708536174870844:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:31.301178Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"0","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[2,43]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":1718,"Max":1718,"Min":1718,"History":[2,1718]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[2,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Introspections":["1 tasks for a single\/sequential source 
scan"],"WaitOutputTimeUs":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[2,38]},"Tasks":1,"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":48,"Max":48,"Min":48}}],"BaseTimeMs":1768048053508,"CpuTimeUs":{"Count":1,"Sum":709,"Max":709,"Min":709,"History":[2,709]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[2,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[2,96]},"WaitTimeUs":{"Count":1,"Sum":1868,"Max":1868,"Min":1868,"History":[2,1868]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":2}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"WaitOutputTimeUs":{"Count":1,"Sum":55,"Max":55,"Min":55,"History":[4,55]},"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":24,"Max":24,"Min":24}}],"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":43,"Max":43,"Min":43},"Introspections":["1 tasks same as previous stage"],"DurationUs":{"Count":1,"Sum":3000,"Max":3000,"Min":3000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"BaseTimeMs":1768048053508,"Output":[{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[4,97]},"WaitTimeUs":{"Count":1,"Sum":3135,"Max":3135,"Min":3135,"History":[4,3135]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}},{"Pop":{},"Name":"6","Push":{}}],"CpuTimeUs":{"Count":1,"Sum":512,"Max":512,"Min":512,"History":[4,512]},"StageDurationUs":3000,"WaitInputTimeUs":{"Count":1,"Sum":2759,"Max":2759,"Min":2759,"History":[4,2759]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[4,43]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":43,"Max":43,"Min":43,"History":[4,43]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":1718,"Max":1718,"Min":1718,"History":[4,1718]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":4,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node 
Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[4,25]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":3469,"Max":3469,"Min":3469,"History":[4,3469]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":4,"Min":1}}},{"Pop":{},"Name":"8","Push":{}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"Introspections":["1 minimum tasks for compute"],"InputBytes":{"Count":1,"Sum":97,"Max":97,"Min":97},"WaitOutputTimeUs":{"Count":1,"Sum":21,"Max":21,"Min":21,"History":[4,21]},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":0,"BaseTimeMs":1768048053508,"CpuTimeUs":{"Count":1,"Sum":767,"Max":767,"Min":767,"History":[4,767]},"UpdateTimeMs":4,"Input":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[4,97]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":97,"Max":97,"Min":97,"History":[4,97]},"WaitTimeUs":{"Count":1,"Sum":3135,"Max":3135,"Min":3135,"History":[4,3135]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[5,1048576]},"Introspections":["1 minimum tasks for 
compute"],"InputBytes":{"Count":1,"Sum":25,"Max":25,"Min":25},"Tasks":1,"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"StageDurationUs":0,"BaseTimeMs":1768048053508,"CpuTimeUs":{"Count":1,"Sum":631,"Max":631,"Min":631,"History":[5,631]},"UpdateTimeMs":4,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":25,"Max":25,"Min":25,"History":[5,25]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":3469,"Max":3469,"Min":3469,"History":[5,3469]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":4,"Min":1}}},{"Pop":{},"Name":"6","Push":{}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":359595,"CpuTimeUs":353529},"ProcessCpuTimeUs":412,"TotalDurationUs":388230,"ResourcePoolId":"default","QueuedTimeUs":838},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-SelfCpu":0.767,"A-Cpu":0.767,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-SelfCpu":0.631,"A-Cpu":1.398,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 14509, MsgBus: 23259 2026-01-10T12:26:56.661107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708410679591390:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:56.661180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:56.958868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:57.065066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:26:57.065168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:57.072602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:57.084035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:57.167070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:57.224763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:57.224787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:57.224794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:57.224887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:57.664631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:57.755582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:57.768112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:57.827524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.002490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.162163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.230486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.246677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] 
ActorId: [1:7593708427859462320:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.246819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.247432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708427859462330:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.247504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:00.654724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.695495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.793061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.863575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.899442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.939029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:00.988792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.073639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.164402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708432154430496:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.164522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.168366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708432154430501:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.168453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708432154430502:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.168808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.173442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:01.192163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708432154430505:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:01.284615Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708432154430558:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:01.664187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708410679591390:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:01.664248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:03.101127Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=Y2IwZDUyYjQtNTU4NmI1NjQtMTU4NThiNGItMWQ0ZGEzNjM=, ActorId: [1:7593708436449398149:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxzncddkd9z63m47kktg7v, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_sessi ... ROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:27.039550Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:27.067105Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:27.067215Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:27.121937Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:27.247420Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:27.288792Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:27.288822Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:27.288834Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:27.288952Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:27.846507Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:28.029966Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:28.047063Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-10T12:27:28.063195Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:28.160280Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:28.419458Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:28.520626Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.118779Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708560474610792:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.118860Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.119236Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708560474610802:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.119323Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.206060Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.246412Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.291611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.340612Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.381938Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.437996Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.502949Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.576500Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.694710Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708560474611677:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.694847Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.695108Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708560474611682:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.695152Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708560474611683:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.695285Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:31.699292Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:31.713999Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708560474611686:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:31.769164Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708560474611737:3795] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:31.828318Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708538999772468:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:31.828382Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:33.711702Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.864373Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=YTBlYTM3ZDUtM2ZhNjZmOTgtOTU4OWQ1OWMtODU3YjU1MWM=, ActorId: [5:7593708569064546640:2533], ActorState: ExecuteState, LegacyTraceId: 01keky0kg1a1q2nygnre5ggj96, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value status# BAD_REQUEST issues# trace_id#
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpExplain::CreateTableAs+Stats |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TKesusTest::TestAcquireLocks >> TKesusTest::TestRegisterProxy >> TKesusTest::TestSessionDetach >> TKesusTest::TestQuoterResourceDescribe >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestReleaseLockFailure >> TKeyValueTracingTest::WriteHuge [GOOD] >> TSubDomainTest::GenericCases [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachOutOfSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 30906, MsgBus: 28728 ... waiting for SysViewsRoster update finished 2026-01-10T12:26:31.570888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:31.689866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:26:31.701732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:26:31.702205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:26:31.702326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:26:32.097982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:32.098140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:32.105007Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768047988505358 != 1768047988505361 2026-01-10T12:26:32.118053Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:32.166247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:32.294737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:26:32.669829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:32.669897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:32.669934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:32.670585Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:32.683453Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:33.070679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:33.148337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:33.375907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:33.744632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.023940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:34.826275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1905:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.826648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.827931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1979:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.828103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:34.861091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:35.046559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:35.308358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:35.600877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:35.852537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.113177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.387322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.686892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:37.031427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:37.031546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:37.031950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:37.032032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:37.032136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:37.038140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:37.202161Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:26:37.250662Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:39.403308Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3157:4452] TxId: 281474976715673. Ctx: { TraceId: 01kekxyycq0f4hva7484qdqvxh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDA0NmRjOS1iOTk2NDA1MC0zNjhiZTcxYS02YjhhYTI5OA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPEC ... eateResource ok# false data# peer# 2026-01-10T12:27:35.016238Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e36c80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-10T12:27:35.016449Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b1bd80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-10T12:27:35.016678Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e38f80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-10T12:27:35.016865Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2cb80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-10T12:27:35.017095Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3c780] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-10T12:27:35.017256Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3b980] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-10T12:27:35.017318Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3ab80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-10T12:27:35.017513Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3ce80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-10T12:27:35.017555Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3c080] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-10T12:27:35.017706Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89c7e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-10T12:27:35.017823Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89ce080] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-10T12:27:35.017915Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3a480] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-10T12:27:35.018099Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e39680] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-01-10T12:27:35.018104Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e38180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-10T12:27:35.018299Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e37a80] received request Name# 
Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-10T12:27:35.018379Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e37380] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-10T12:27:35.018493Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e35780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-10T12:27:35.018672Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e34280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-10T12:27:35.018696Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e34980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-10T12:27:35.018879Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e33b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-10T12:27:35.018963Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e32680] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-10T12:27:35.019060Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e32d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-10T12:27:35.019239Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e21c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-10T12:27:35.019282Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e36580] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-10T12:27:35.019497Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e38880] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-10T12:27:35.019514Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3b280] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-10T12:27:35.019731Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e3d580] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-10T12:27:35.019798Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e39d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-10T12:27:35.019943Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8bfe080] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-10T12:27:35.020052Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89e6880] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-10T12:27:35.020189Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b3a780] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-01-10T12:27:35.020342Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89c1580] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-10T12:27:35.020479Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89bf280] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-10T12:27:35.020599Z node 7 :GRPC_SERVER 
DEBUG: logger.cpp:36: [0x7d54d89c3180] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-10T12:27:35.020764Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d89c9380] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-10T12:27:35.020881Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8bfd980] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-10T12:27:35.021026Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8bfaf80] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-10T12:27:35.021124Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8a5a780] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-10T12:27:35.021379Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8a92080] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-10T12:27:35.021651Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e31f80] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-10T12:27:35.021806Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e30380] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-10T12:27:35.021963Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2e780] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-10T12:27:35.022094Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2e080] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-10T12:27:35.022274Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2d980] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-10T12:27:35.022344Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e33480] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-10T12:27:35.022588Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e31180] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-10T12:27:35.022588Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e41b80] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-10T12:27:35.022816Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2bd80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-10T12:27:35.022857Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2d280] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-10T12:27:35.023065Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8bec180] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-10T12:27:35.023153Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b71980] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-01-10T12:27:35.023341Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b6da80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-10T12:27:35.023424Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8c81b80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-10T12:27:35.023609Z node 7 
:GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b75180] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-10T12:27:35.023696Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b72080] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-10T12:27:35.023876Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e2af80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-10T12:27:35.023970Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e13c80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-10T12:27:35.024168Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e15180] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-10T12:27:35.024340Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e14a80] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-10T12:27:35.024435Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8b48780] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-10T12:27:35.024637Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8e13580] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-10T12:27:35.024736Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d54d8ab1880] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] Test command err: 2026-01-10T12:27:35.856431Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008303s |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas |95.1%| [TA] 
$(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::CreateAsSelectView [GOOD] >> KqpQuery::CreateTableAs_MkDir >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-10T12:27:23.200658Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708526725089493:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:23.200721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:23.245781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:23.524165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:23.599136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:23.599278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:23.647542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:23.710815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:23.724698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:23.907436Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593708526725089694:2144] Handle TEvNavigate describe path dc-1 2026-01-10T12:27:23.907517Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593708526725090157:2451] HANDLE EvNavigateScheme dc-1 
2026-01-10T12:27:23.907643Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708526725089704:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:23.907757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593708526725089923:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593708526725089704:2147], cookie# 1 2026-01-10T12:27:23.909588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708526725089958:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089955:2290], cookie# 1 2026-01-10T12:27:23.909649Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708526725089959:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089956:2290], cookie# 1 2026-01-10T12:27:23.909675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708526725089960:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089957:2290], cookie# 1 2026-01-10T12:27:23.909733Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708522430122044:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089958:2290], cookie# 1 2026-01-10T12:27:23.909799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708522430122047:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089959:2290], cookie# 1 2026-01-10T12:27:23.909870Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708522430122050:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708526725089960:2290], cookie# 1 2026-01-10T12:27:23.909923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708526725089958:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708522430122044:2051], cookie# 1 2026-01-10T12:27:23.909940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708526725089959:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708522430122047:2054], cookie# 1 2026-01-10T12:27:23.909955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708526725089960:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708522430122050:2057], cookie# 1 2026-01-10T12:27:23.910012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708526725089923:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708526725089955:2290], cookie# 1 2026-01-10T12:27:23.910047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593708526725089923:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:27:23.910067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7593708526725089923:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708526725089956:2290], cookie# 1 2026-01-10T12:27:23.910089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593708526725089923:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:27:23.910121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708526725089923:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708526725089957:2290], cookie# 1 2026-01-10T12:27:23.910150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593708526725089923:2290][/dc-1] Sync cookie mismatch: sender# [1:7593708526725089957:2290], cookie# 1, current cookie# 0 2026-01-10T12:27:23.910224Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593708526725089704:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:27:23.918107Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593708526725089704:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593708526725089923:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:23.918334Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593708526725089704:2147], cacheItem# { Subscriber: { Subscriber: [1:7593708526725089923:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:27:23.922639Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593708526725090158:2452], recipient# [1:7593708526725090157:2451], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:27:23.923014Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593708526725090157:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:27:23.969303Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# 
[1:7593708526725090157:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:27:23.974430Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593708526725090157:2451] Handle TEvDescribeSchemeResult Forward to# [1:7593708526725090156:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:27:23.986033Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593708526725089694:2144] Handle TEvProposeTransaction 2026-01-10T12:27:23.986061Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593708526725089694:2144] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:27:23.986970Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593708526725089694:2144] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7593708526725090166:2459] 2026-01-10T12:27:24.088847Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7593708526725090166:2459] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: ... 
: owner# [4:7593708558156749183:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:34.995213Z node 4 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [4:7593708553861781581:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7593708571041652344:3035] 2026-01-10T12:27:34.995232Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7593708571041652327:3035][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7593708571041652340:3035] 2026-01-10T12:27:34.995266Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7593708571041652327:3035][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7593708558156749183:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:34.995284Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:27:34.995289Z node 4 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [4:7593708553861781584:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7593708571041652339:3033] 2026-01-10T12:27:34.995303Z node 4 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [4:7593708553861781584:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7593708571041652345:3035] 2026-01-10T12:27:34.995363Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7593708571041652326:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:34.995479Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593708558156749183:2130], cacheItem# { Subscriber: { Subscriber: [4:7593708571041652326:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:34.995514Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-10T12:27:34.995583Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { 
Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7593708571041652325:3033] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:34.995633Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593708558156749183:2130], cacheItem# { Subscriber: { Subscriber: [4:7593708571041652325:3033] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:34.995689Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-10T12:27:34.995731Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7593708558156749183:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7593708571041652327:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:34.995789Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593708571041652346:3036], recipient# [4:7593708571041652321:2323], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:34.995808Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593708558156749183:2130], cacheItem# { Subscriber: { Subscriber: [4:7593708571041652327:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:34.995892Z node 4 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593708571041652347:3037], recipient# [4:7593708571041652323:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:35.972165Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708553861781678:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:35.972286Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:35.987222Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7593708558156749183:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:35.987364Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593708558156749183:2130], cacheItem# { Subscriber: { Subscriber: [4:7593708558156749842:2568] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:35.987488Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593708575336619656:3043], recipient# [4:7593708575336619655:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:35.995386Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7593708558156749183:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:35.995529Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7593708558156749183:2130], cacheItem# { Subscriber: { Subscriber: [4:7593708571041652327:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:35.995656Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7593708575336619658:3044], recipient# [4:7593708575336619657:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestQuoterAccountResourcesBurst |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestKesusConfig |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> KqpQuery::Pure [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2026-01-10T12:27:37.783461Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 
72057594037927937 2026-01-10T12:27:37.783596Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.810684Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.810843Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.848783Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.849374Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=16180876424067615921, session=0, seqNo=0) 2026-01-10T12:27:37.849574Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:37.864898Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=16180876424067615921, session=1) 2026-01-10T12:27:37.866910Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=3685192622962277478, session=2) 2026-01-10T12:27:37.867008Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=3685192622962277478) 2026-01-10T12:27:37.867684Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:136:2161], cookie=13075808530354265934 2026-01-10T12:27:37.868619Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=10078904445965422002, session=1, seqNo=0) 2026-01-10T12:27:37.880705Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=10078904445965422002, session=1) 2026-01-10T12:27:37.881134Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:37.881289Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:37.881406Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:37.881572Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=1530473016259779316, session=1) 2026-01-10T12:27:37.892040Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-10T12:27:37.892160Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:27:37.892235Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-10T12:27:37.904661Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:37.904763Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=1530473016259779316) 2026-01-10T12:27:37.904834Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 
2026-01-10T12:27:38.334455Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.334573Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.355717Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.356265Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.397024Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.397486Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:136:2161], cookie=16665396433808849017, path="") 2026-01-10T12:27:38.410978Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:136:2161], cookie=16665396433808849017, status=SUCCESS) 2026-01-10T12:27:38.411578Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:146:2168], cookie=111, session=0, seqNo=0) 2026-01-10T12:27:38.411716Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:38.411906Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:146:2168], cookie=10535034419589238939, session=1) 2026-01-10T12:27:38.425499Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-10T12:27:38.425585Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:27:38.444553Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:146:2168], cookie=111, session=1) 2026-01-10T12:27:38.444686Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:146:2168], cookie=10535034419589238939) 2026-01-10T12:27:38.444740Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-10T12:27:38.801518Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.801631Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.821115Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.821249Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.856068Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.856662Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=13596174928391371787, session=0, seqNo=0) 2026-01-10T12:27:38.856815Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:38.869019Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=13596174928391371787, session=1) 2026-01-10T12:27:38.869899Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=9699750648023261939, session=1) 2026-01-10T12:27:38.870003Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: 
[72057594037927937] Deleting session 1 2026-01-10T12:27:38.882126Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=9699750648023261939) 2026-01-10T12:27:38.883076Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=15493317282100163024) 2026-01-10T12:27:38.883157Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=15493317282100163024) 2026-01-10T12:27:38.883722Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:157:2179], cookie=16241459005329361593, session=0, seqNo=0) 2026-01-10T12:27:38.883848Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:38.896272Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:157:2179], cookie=16241459005329361593, session=2) 2026-01-10T12:27:38.897604Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=17359231144178341349, session=2) 2026-01-10T12:27:38.897716Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2026-01-10T12:27:38.909961Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=17359231144178341349) 2026-01-10T12:27:39.299254Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.299375Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.320709Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.320850Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.337309Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.338111Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-01-10T12:27:39.338254Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.365944Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-01-10T12:27:39.366725Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:143:2166], cookie=23456, session=1, seqNo=0) 2026-01-10T12:27:39.384943Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:143:2166], cookie=23456, session=1) 2026-01-10T12:27:39.738769Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.738871Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.785954Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.786102Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.826077Z node 5 :KESUS_TABLET DEBUG: 
tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.827034Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-10T12:27:39.827195Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.840791Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12345, session=1) 2026-01-10T12:27:39.841591Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-10T12:27:39.861080Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:144:2166], cookie=23456, session=1) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 13598, MsgBus: 29360 2026-01-10T12:25:47.241549Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708112416476231:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:47.241662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:47.290333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:25:47.577726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:47.589125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:47.589242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:47.622049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:47.649512Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:47.652360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708112416476196:2081] 1768047947236071 != 1768047947236074 2026-01-10T12:25:47.879254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:47.930433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-10T12:25:47.930535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:47.930550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:47.930649Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:48.256304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:48.583310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:48.665541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:48.844202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:49.020335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:49.099173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:50.854178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708125301379962:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:50.854313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:50.854629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708125301379971:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:50.854711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.502769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.533963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.564067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.593498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.627621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.686208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.717063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.785874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:51.868714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708129596348144:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.868798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.868883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708129596348149:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.868991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708129596348151:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.869044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:51.876017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:51.886133Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708129596348153:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:51.953371Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708129596348204:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:52.241504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708112416476231:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:52.241632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6393, MsgBus: 2329 2026-01-10T12:25:55.357887Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline ... ROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7593708534637882650:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.003725Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:26.048655Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:26.203093Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:26.222040Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593708534637882623:2081] 1768048045987215 != 1768048045987218 2026-01-10T12:27:26.227289Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:26.227421Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:26.266931Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:26.296181Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:26.444991Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:26.445020Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:26.445032Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:26.445161Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:27.024997Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:27.587449Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:27.599700Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:27.617988Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:27.775918Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:28.103474Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:28.250741Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:31.004256Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708534637882650:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:31.007099Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:32.500657Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708564702655576:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:32.500797Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:32.501359Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708564702655585:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:32.501439Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:32.682216Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:32.739693Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:32.801697Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:32.861684Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:32.930319Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:32.996011Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.055800Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.210243Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.340199Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708568997623774:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.340390Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.340427Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708568997623779:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.340668Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708568997623781:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.340729Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.348727Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:33.371364Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708568997623783:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:33.469660Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708568997623834:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:37.567835Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768048057041, txId: 281474976710673] shutting down |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> KqpLimits::ReplySizeExceeded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2026-01-10T12:27:37.783495Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.783640Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.806848Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.807018Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.846856Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.847494Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=14989314956129397318, session=0, seqNo=0) 2026-01-10T12:27:37.850325Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:37.863123Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=14989314956129397318, session=1) 2026-01-10T12:27:37.863534Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=1595273287516737310, session=0, seqNo=0) 2026-01-10T12:27:37.863677Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:37.876234Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=1595273287516737310, session=2) 2026-01-10T12:27:38.333384Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.333478Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.352964Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.353363Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.387907Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.388523Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=10170811363837172774, session=1, seqNo=0) 
2026-01-10T12:27:38.400598Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=10170811363837172774, session=1) 2026-01-10T12:27:38.817526Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.817635Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.842358Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.842515Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.880376Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.881361Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=4322250941479102439, session=0, seqNo=0) 2026-01-10T12:27:38.881538Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:38.900887Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=4322250941479102439, session=1) 2026-01-10T12:27:39.395743Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.395853Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.418940Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.419060Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.437268Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.437711Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:137:2161], cookie=15592026521794042885, path="") 2026-01-10T12:27:39.462935Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:137:2161], cookie=15592026521794042885, status=SUCCESS) 2026-01-10T12:27:39.464200Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:146:2168], cookie=17127992520959253101, session=0, seqNo=0) 2026-01-10T12:27:39.464396Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.481228Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:146:2168], cookie=17127992520959253101, session=1) 2026-01-10T12:27:39.482200Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:147:2169], cookie=111, session=0, seqNo=0) 2026-01-10T12:27:39.482343Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:39.482514Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:147:2169], cookie=222, seqNo=0 2026-01-10T12:27:39.494916Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:147:2169], cookie=111, session=2) 2026-01-10T12:27:40.057723Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.057877Z node 5 
:KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.081235Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.081409Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.118005Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.118431Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:136:2161], cookie=2078245414325376707, path="") 2026-01-10T12:27:40.132740Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:136:2161], cookie=2078245414325376707, status=SUCCESS) 2026-01-10T12:27:40.133882Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:146:2168], cookie=7305946109354421285, session=0, seqNo=0) 2026-01-10T12:27:40.134052Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.150264Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:146:2168], cookie=7305946109354421285, session=1) 2026-01-10T12:27:40.151155Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:146:2168], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:40.151364Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:40.151479Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:40.151906Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=111, session=0, seqNo=0) 2026-01-10T12:27:40.151999Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:40.152166Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=222, session=1, seqNo=0) 2026-01-10T12:27:40.165005Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:146:2168], cookie=123) 2026-01-10T12:27:40.165116Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=111, session=2) 2026-01-10T12:27:40.165178Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=222, session=1) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestAcquireWaiterDowngrade >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex |95.1%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2026-01-10T12:27:37.783454Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.783569Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.807181Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.807353Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.847004Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.401628Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.401745Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.424504Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.425393Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.465017Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.831119Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.831231Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.850904Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.851056Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.886683Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.363818Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.363954Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.385442Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.385848Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.412900Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.415338Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 5 2026-01-10T12:27:39.415895Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:195:2162]) 2026-01-10T12:27:40.042271Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.042396Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.064390Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.064552Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... waiting for register request 2026-01-10T12:27:40.116790Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 12994912599570116037 ... waiting for register request (done) ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2026-01-10T12:27:40.117679Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7 2026-01-10T12:27:40.118018Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:193:2162]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2026-01-10T12:27:38.415167Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.415280Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.441334Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.441535Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.481682Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.482453Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=5500270725363157712, session=0, seqNo=222) 2026-01-10T12:27:38.482605Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:38.497678Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=5500270725363157712, session=1) 2026-01-10T12:27:38.498235Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=458389798631706365, session=1, seqNo=111) 2026-01-10T12:27:38.518032Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=458389798631706365, session=1) 2026-01-10T12:27:39.079438Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.079593Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.106898Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.107417Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.148579Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.149270Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=111, session=0, seqNo=42) 2026-01-10T12:27:39.149458Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.149652Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=222, session=1, seqNo=41) 2026-01-10T12:27:39.163038Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=111, session=1) 2026-01-10T12:27:39.163147Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=222, session=1) 2026-01-10T12:27:39.576811Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.576925Z node 3 
:KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.595929Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.596103Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.640750Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.641706Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=14098158362779938513, session=0, seqNo=0) 2026-01-10T12:27:39.641945Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.660834Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=14098158362779938513, session=1) 2026-01-10T12:27:39.662681Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=15695212824975531375) 2026-01-10T12:27:39.662780Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=15695212824975531375) 2026-01-10T12:27:40.216793Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.216911Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.237363Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.237498Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.254768Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.641450Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.641561Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.661828Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.662179Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.686991Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.687563Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=17863830065001966719, session=0, seqNo=0) 2026-01-10T12:27:40.687748Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.701558Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=17863830065001966719, session=1) 2026-01-10T12:27:40.701964Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:40.702133Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:40.702226Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:40.716947Z node 5 
:KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=111) 2026-01-10T12:27:40.717911Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=10533546360295178811, name="Sem1", limit=42) 2026-01-10T12:27:40.718059Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-10T12:27:40.732885Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=10533546360295178811) 2026-01-10T12:27:40.733458Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2178], cookie=6725424243842430385, name="Sem1", limit=42) 2026-01-10T12:27:40.745973Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2178], cookie=6725424243842430385) 2026-01-10T12:27:40.746514Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2183], cookie=3538463161942695718, name="Sem1", limit=51) 2026-01-10T12:27:40.758480Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2183], cookie=3538463161942695718) 2026-01-10T12:27:40.758973Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2188], cookie=5808735039090964112, name="Lock1", limit=42) 2026-01-10T12:27:40.771366Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2188], cookie=5808735039090964112) 2026-01-10T12:27:40.772013Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:171:2193], cookie=16685239788399302719, name="Lock1", limit=18446744073709551615) 2026-01-10T12:27:40.784604Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:171:2193], cookie=16685239788399302719) 2026-01-10T12:27:40.785274Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:176:2198], cookie=7999226566570573217, name="Sem1") 2026-01-10T12:27:40.785385Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:176:2198], cookie=7999226566570573217) 2026-01-10T12:27:40.785998Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:179:2201], cookie=7566747647280877541, name="Sem2") 2026-01-10T12:27:40.786090Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:179:2201], cookie=7566747647280877541) 2026-01-10T12:27:40.803707Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.803815Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.804333Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.805014Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.843431Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: 
[72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.843572Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:40.843970Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:218:2231], cookie=533873434907837466, name="Sem1") 2026-01-10T12:27:40.844069Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:218:2231], cookie=533873434907837466) 2026-01-10T12:27:40.844725Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:225:2237], cookie=4487834501240917270, name="Sem2") 2026-01-10T12:27:40.844816Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:225:2237], cookie=4487834501240917270) |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2026-01-10T12:27:37.783448Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.783623Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.806172Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.806347Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.852842Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.853348Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=2008992203181609572, session=0, seqNo=0) 2026-01-10T12:27:37.853515Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:37.866294Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=2008992203181609572, session=1) 2026-01-10T12:27:37.866601Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=11694582839459700409, session=0, seqNo=0) 2026-01-10T12:27:37.866738Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:37.880436Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=11694582839459700409, session=2) 2026-01-10T12:27:37.880745Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=111, name="Lock1") 2026-01-10T12:27:37.893642Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:37.893990Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:37.894137Z node 1 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:37.894285Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:37.907530Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-10T12:27:37.907859Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2026-01-10T12:27:37.920150Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2026-01-10T12:27:38.325577Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:38.325680Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:38.345097Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:38.345517Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.381282Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.381876Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=7865702960234017041, session=0, seqNo=0) 2026-01-10T12:27:38.382037Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:38.395996Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=7865702960234017041, session=1) 2026-01-10T12:27:38.396349Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=16073301851516513804, session=0, seqNo=0) 2026-01-10T12:27:38.396502Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:38.409798Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=16073301851516513804, session=2) 2026-01-10T12:27:38.410330Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:148:2170], cookie=3894059421558971412, name="Sem1", limit=1) 2026-01-10T12:27:38.410476Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:27:38.428693Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:148:2170], cookie=3894059421558971412) 2026-01-10T12:27:38.429004Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:38.429148Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-10T12:27:38.429334Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-10T12:27:38.442301Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-10T12:27:38.442398Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=222) 2026-01-10T12:27:38.443212Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=7269542212144694814, name="Sem1") 2026-01-10T12:27:38.443371Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=7269542212144694814) 2026-01-10T12:27:38.444058Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2181], cookie=4252277362488170661, name="Sem1") 2026-01-10T12:27:38.444203Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2181], cookie=4252277362488170661) 2026-01-10T12:27:38.444600Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:136:2161], cookie=333, name="Sem1") 2026-01-10T12:27:38.444764Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2026-01-10T12:27:38.457918Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:136:2161], cookie=333) 2026-01-10T12:27:38.458404Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=14735841479191221139, name="Sem1") 2026-01-10T12:27:38.458468Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=14735841479191221139) 2026-01-10T12:27:38.458818Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], cookie=4931785599769890965, name="Sem1") 2026-01-10T12:27:38.458868Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=4931785599769890965) 2026-01-10T12:27:38.459065Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:136:2161], cookie=444, name="Sem1") 2026-01-10T12:27:38.459134Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-10T12:27:38.473510Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:136:2161], cookie=444) 2026-01-10T12:27:38.474554Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:172:2194], cookie=362909685826189891, name="Sem1") 2026-01-10T12:27:38.474682Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:172:2194], cookie=362909685826189891) 2026-01-10T12:27:38.475436Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:175:2197], cookie=15342880241870636449, name="Sem1") 2026-01-10T12:27:38.475545Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:175:2197], 
cookie=15342880241870636449) 2026-01-10T12:27:39.039232Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.039335Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.059905Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.060068Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.097028Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.097509Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:136:2161], cookie=4936193934597990828, name="Sem1", limit=1) 2026-01-10T12:27:39.097712Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:27:39.112526Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:136:2161], cookie=4936193934597990828) 2026-01-10T12:27:39.113217Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:146:2168], cookie=17594340403781842843, name="Sem2", limit=1) 2026-01-10T12:27:39.113395Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2026-01-10T12:27:39.125970Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:146:2168], cookie=17594340403781842843) 2026-01-10T12:27:39.126657Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2173], cookie=9255191351177112130, name="Sem1") 2026-01-10T12:27:39.126767Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2173], cookie=9255191351177112130) 2026-01-10T12:27:39.127240Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2176], cookie=7525395408179493524, name="Sem2") 2026-01-10T12:27:39.127320Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2176], cookie=7525395408179493524) 2026-01-10T12:27:39.143750Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.143885Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute ... 
emaphoreCreate::Complete (sender=[4:249:2270], cookie=13638910259423855916) 2026-01-10T12:27:40.194221Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:40.194391Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2026-01-10T12:27:40.208909Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-10T12:27:40.209603Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-10T12:27:40.225250Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-10T12:27:40.225831Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Sem1") 2026-01-10T12:27:40.225951Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2026-01-10T12:27:40.259956Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-10T12:27:40.260619Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=444, session=2, semaphore="Sem1" count=1) 2026-01-10T12:27:40.276175Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=444) 2026-01-10T12:27:40.276748Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=555, name="Sem1") 2026-01-10T12:27:40.276849Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2026-01-10T12:27:40.276935Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2026-01-10T12:27:40.290077Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=555) 2026-01-10T12:27:40.644270Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.644397Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.665930Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.666341Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.691468Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.692120Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=11987154348209529098, session=0, seqNo=0) 2026-01-10T12:27:40.692298Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.704657Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete 
(sender=[5:137:2161], cookie=11987154348209529098, session=1) 2026-01-10T12:27:40.705034Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=112, name="Sem1", limit=5) 2026-01-10T12:27:40.705205Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:27:40.717640Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=112) 2026-01-10T12:27:40.718031Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=113, name="Sem1") 2026-01-10T12:27:40.730426Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=113) 2026-01-10T12:27:40.730806Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=114, name="Sem1", force=0) 2026-01-10T12:27:40.730924Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-10T12:27:40.742982Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=114) 2026-01-10T12:27:40.743327Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:137:2161], cookie=10221938700262158653 2026-01-10T12:27:40.743655Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=115, name="Sem1", limit=5) 2026-01-10T12:27:40.758026Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=115) 2026-01-10T12:27:40.758395Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=116, name="Sem1") 2026-01-10T12:27:40.772738Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=116) 2026-01-10T12:27:40.773180Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=117, name="Sem1", force=0) 2026-01-10T12:27:40.785463Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=117) 2026-01-10T12:27:40.785790Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=118, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:40.798325Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=118) 2026-01-10T12:27:40.798708Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=119, name="Sem1") 2026-01-10T12:27:40.811351Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=119) 2026-01-10T12:27:40.811765Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=120, name="Sem1") 2026-01-10T12:27:40.811880Z 
node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=120) 2026-01-10T12:27:40.812159Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:137:2161], cookie=15463680290640837824, session=1) 2026-01-10T12:27:40.812272Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:27:40.826612Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:137:2161], cookie=15463680290640837824) 2026-01-10T12:27:40.826974Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=121, name="Sem1", limit=5) 2026-01-10T12:27:40.839714Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=121) 2026-01-10T12:27:40.840138Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=122, name="Sem1") 2026-01-10T12:27:40.852890Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=122) 2026-01-10T12:27:40.853273Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=123, name="Sem1", force=0) 2026-01-10T12:27:40.870468Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=123) 2026-01-10T12:27:40.870813Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=124, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:40.883611Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=124) 2026-01-10T12:27:40.884065Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=125, name="Sem1") 2026-01-10T12:27:40.907371Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=125) 2026-01-10T12:27:40.907777Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=126, name="Sem1") 2026-01-10T12:27:40.907884Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=126) 2026-01-10T12:27:40.908648Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2161], cookie=127, name="Sem1", limit=5) 2026-01-10T12:27:40.908741Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2161], cookie=127) 2026-01-10T12:27:40.909025Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2161], cookie=128, name="Sem1") 2026-01-10T12:27:40.909100Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2161], cookie=128) 2026-01-10T12:27:40.909376Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: 
[72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2161], cookie=129, name="Sem1", force=0) 2026-01-10T12:27:40.909451Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2161], cookie=129) 2026-01-10T12:27:40.909713Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=130, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:40.909800Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=130) 2026-01-10T12:27:40.910052Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=131, name="Sem1") 2026-01-10T12:27:40.910122Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=131) 2026-01-10T12:27:40.910371Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2161], cookie=132, name="Sem1") 2026-01-10T12:27:40.910437Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2161], cookie=132) >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2026-01-10T12:27:37.783794Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.783887Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.809761Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.809958Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.846827Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.855572Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=15535142512930871387, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-10T12:27:37.855868Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:37.868224Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=15535142512930871387) 2026-01-10T12:27:37.868802Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2168], cookie=10863409841472854792, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-10T12:27:37.869045Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2026-01-10T12:27:37.884464Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2168], cookie=10863409841472854792) 2026-01-10T12:27:37.885101Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute 
(sender=[1:151:2173], cookie=8602917552697187888, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:37.885329Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2026-01-10T12:27:37.898195Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2173], cookie=8602917552697187888) 2026-01-10T12:27:37.898838Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2178], cookie=15793262928105654669, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:37.899041Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-10T12:27:37.911140Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2178], cookie=15793262928105654669) 2026-01-10T12:27:37.911771Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2183], cookie=9075453764860790031, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:37.911977Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2026-01-10T12:27:37.923862Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2183], cookie=9075453764860790031) 2026-01-10T12:27:37.924518Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2188], cookie=13715212128142121356, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:37.924755Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2026-01-10T12:27:37.940614Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2188], cookie=13715212128142121356) 2026-01-10T12:27:37.941245Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2193], cookie=15736962554662540781, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-10T12:27:37.941429Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2026-01-10T12:27:37.956615Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2193], cookie=15736962554662540781) 2026-01-10T12:27:37.957377Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:176:2198], cookie=16354700218319103020, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:37.957588Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2026-01-10T12:27:37.970243Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:176:2198], cookie=16354700218319103020) 2026-01-10T12:27:37.971130Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:181:2203], 
cookie=5257292294016078, ids=[100], paths=[], recursive=0) 2026-01-10T12:27:37.971248Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:181:2203], cookie=5257292294016078) 2026-01-10T12:27:37.971889Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:184:2206], cookie=8566267756403931414, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-10T12:27:37.971987Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:184:2206], cookie=8566267756403931414) 2026-01-10T12:27:37.972611Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:187:2209], cookie=10820824673466762199, ids=[], paths=[/Root, ], recursive=0) 2026-01-10T12:27:37.972730Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:187:2209], cookie=10820824673466762199) 2026-01-10T12:27:37.973287Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:190:2212], cookie=6218555270173458129, ids=[1, 1], paths=[], recursive=0) 2026-01-10T12:27:37.973361Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:190:2212], cookie=6218555270173458129) 2026-01-10T12:27:37.974000Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:193:2215], cookie=15567415607883481515, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2026-01-10T12:27:37.974114Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:193:2215], cookie=15567415607883481515) 2026-01-10T12:27:37.974684Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:196:2218], cookie=16376114500012283437, ids=[], paths=[], recursive=1) 2026-01-10T12:27:37.974782Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:196:2218], cookie=16376114500012283437) 2026-01-10T12:27:37.976584Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:199:2221], cookie=13323590086341677162, ids=[], paths=[], recursive=0) 2026-01-10T12:27:37.976676Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:199:2221], cookie=13323590086341677162) 2026-01-10T12:27:37.977275Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:202:2224], cookie=15417386237715560254, ids=[3, 2], paths=[], recursive=1) 2026-01-10T12:27:37.977354Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:202:2224], cookie=15417386237715560254) 2026-01-10T12:27:37.977977Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:205:2227], cookie=5745784837214022460, ids=[3, 2], paths=[], recursive=0) 
2026-01-10T12:27:37.978038Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:205:2227], cookie=5745784837214022460) 2026-01-10T12:27:37.978641Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:208:2230], cookie=14221360736161165855, ids=[], paths=[Root2/], recursive=1) 2026-01-10T12:27:37.978740Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:208:2230], cookie=14221360736161165855) 2026-01-10T12:27:37.979394Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:211:2233], cookie=7151914762372860769, ids=[], paths=[Root2/], recursive=0) 2026-01-10T12:27:37.979462Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:211:2233], cookie=7151914762372860769) 2026-01-10T12:27:37.997212Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.997360Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.997950Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.998584Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:38.017543Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:38.018013Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=681455685233272851, ids=[100], paths=[], recursive=0) 2026-01-10T12:27:38.018116Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=681455685233272851) 2026-01-10T12:27:38.018930Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2268], cookie=8403885357172361296, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-10T12:27:38.019029Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2268], cookie=8403885357172361296) 2026-01-10T12:27:38.019711Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2271], cookie=11000315819141971485, ids=[], paths=[/Root, ], recursive=0) 2026-01-10T12:27:38.019800Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2271], cookie=11000315819141971485) 2026-01-10T12:27:38.020592Z nod ... 
BLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-10T12:27:40.413076Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:156:2178], cookie=15571374556754214533) 2026-01-10T12:27:40.413764Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:161:2183], cookie=7113420366525978576, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.413886Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:161:2183], cookie=7113420366525978576) 2026-01-10T12:27:40.414765Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:167:2189], cookie=573149562811283017, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.414841Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:167:2189], cookie=573149562811283017) 2026-01-10T12:27:40.415754Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:173:2195], cookie=7244717333760251226, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.415836Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:173:2195], cookie=7244717333760251226) 2026-01-10T12:27:40.416731Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:176:2198], cookie=15943542434072351966, id=0, path="/Root/Folder/NonexistingRes") 2026-01-10T12:27:40.416855Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:176:2198], cookie=15943542434072351966) 2026-01-10T12:27:40.417444Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:179:2201], cookie=3867957630084088370, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.417531Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:179:2201], cookie=3867957630084088370) 2026-01-10T12:27:40.418123Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:182:2204], cookie=14722235804923073284, id=100, path="") 2026-01-10T12:27:40.418201Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:182:2204], cookie=14722235804923073284) 2026-01-10T12:27:40.418716Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:185:2207], cookie=12010600753117093717, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.418798Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:185:2207], cookie=12010600753117093717) 2026-01-10T12:27:40.419466Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:188:2210], cookie=14499182289168922947, id=3, path="") 2026-01-10T12:27:40.419556Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:188:2210], cookie=14499182289168922947) 2026-01-10T12:27:40.424472Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:191:2213], cookie=8582482752853412349, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.424590Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:191:2213], cookie=8582482752853412349) 2026-01-10T12:27:40.425330Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:194:2216], cookie=18155862357381921239, id=0, path="/Root/Folder/Q1") 2026-01-10T12:27:40.425523Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2026-01-10T12:27:40.447079Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:194:2216], cookie=18155862357381921239) 2026-01-10T12:27:40.447884Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:199:2221], cookie=12989240359417999966, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.447994Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:199:2221], cookie=12989240359417999966) 2026-01-10T12:27:40.469349Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.469476Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.470060Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.470499Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.522626Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.523016Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:238:2251], cookie=16684162701824408006, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.523106Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:238:2251], cookie=16684162701824408006) 2026-01-10T12:27:40.524125Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:244:2256], cookie=1505818396991421011, id=3, path="") 2026-01-10T12:27:40.524271Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2026-01-10T12:27:40.536892Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:244:2256], cookie=1505818396991421011) 2026-01-10T12:27:40.537627Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:249:2261], cookie=18333165495462230450, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.537722Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:249:2261], cookie=18333165495462230450) 
2026-01-10T12:27:40.553060Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.553163Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.553647Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.554240Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.593335Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.593714Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:288:2291], cookie=13060582200674484225, ids=[], paths=[], recursive=1) 2026-01-10T12:27:40.593814Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:288:2291], cookie=13060582200674484225) 2026-01-10T12:27:40.940095Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.940214Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.960098Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.960489Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.985272Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.985734Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2161], cookie=4495582100326342372, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:40.985949Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2026-01-10T12:27:40.997986Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2161], cookie=4495582100326342372) 2026-01-10T12:27:40.998615Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=3521534998478329664, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-10T12:27:40.998807Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2026-01-10T12:27:41.012025Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=3521534998478329664) 2026-01-10T12:27:41.013727Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 7636287764819580818. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:41.013818Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=7636287764819580818) 2026-01-10T12:27:41.014622Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 8384427158941684208. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2026-01-10T12:27:41.014677Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=8384427158941684208) |95.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::CreateTableAs-Stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 8567, MsgBus: 32626 2026-01-10T12:26:53.849012Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708394991308699:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:53.849420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:54.122088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:54.122181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:54.172268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:54.188267Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708394991308594:2081] 1768048013832537 != 1768048013832540 2026-01-10T12:26:54.196509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:54.205756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:54.327790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:54.327821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:54.327836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:54.327946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:54.369629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:54.752466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:54.761633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:54.811384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:54.874567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:54.985495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:55.143949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:55.228448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.324987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708412171179651:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.325102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.325535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708412171179660:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.325569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:57.642497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.673492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.702418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.728986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.761070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.802915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.872507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:57.919867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:58.000811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708412171180538:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.000911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.001204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708416466147839:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.001269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708416466147840:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.001324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.005472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:58.023822Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708416466147843:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:58.109462Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708416466147894:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:58.848198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708394991308699:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:58.848264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2909, MsgBus: 16935 2026-01-10T12:27:01.106599Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... IDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:32.188662Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:32.311545Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:32.311661Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:32.313064Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708561290556578:2081] 1768048052150190 != 1768048052150193 2026-01-10T12:27:32.328779Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:32.340276Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:32.345286Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:27:32.345307Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:27:32.388161Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:32.401314Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:32.401340Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:32.401350Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:32.401441Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:32.967982Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:32.975995Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:32.986412Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:33.000319Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.080979Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.204225Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:33.273155Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.372954Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.449150Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708578470427636:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:36.449274Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:36.449601Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708578470427646:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:36.449687Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:36.536038Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.579989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.624395Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.666159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.711801Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.760156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.822434Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.883571Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:37.014241Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708582765395818:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:37.014383Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:37.014754Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708582765395823:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:37.014835Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708582765395824:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:37.014887Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:37.019837Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:37.039164Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708582765395827:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:37.106033Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708582765395878:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:37.156244Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708561290556632:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:37.156308Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 22757, MsgBus: 15043 2026-01-10T12:26:37.830624Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708327765723459:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:37.831634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:38.062673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:38.068602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:38.068727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:38.082824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:38.189153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:38.190613Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708327765723354:2081] 1768047997826248 != 1768047997826251 2026-01-10T12:26:38.262908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:38.262941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:38.262951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:38.263037Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:38.299441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:38.763036Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:38.771715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:38.817610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:38.836578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:42.830458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708327765723459:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:42.830527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:47.841032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708370715398110:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.841042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708370715398119:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.841119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.841440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708370715398126:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.841501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:47.845144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:47.858442Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708370715398125:2449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:26:47.963406Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708370715398181:3172] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:48.247741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:53.043153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:26:53.043182Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 11178, MsgBus: 11678 2026-01-10T12:26:58.182575Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708415772746431:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:58.182678Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:58.215462Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:58.382789Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:58.385069Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:58.396176Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708415772746395:2081] 1768048018181356 != 1768048018181359 2026-01-10T12:26:58.396623Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:58.396711Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:58.426840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:58.497187Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:58.497221Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:58.497229Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:58.497661Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:58.569366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:26:59.034128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:59.046430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:59.061133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.209087Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:03.183227Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708415772746431:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:03.183359Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:09.021331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708463017388453:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.021395Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.021618Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708463017388465:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issu ... 2026-01-10T12:27:28.893739Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:28.896041Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708547535873174:2081] 1768048048640693 != 1768048048640696 2026-01-10T12:27:28.896120Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:28.911994Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:28.912124Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:28.946714Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:29.140991Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:29.147483Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:29.147511Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:29.147521Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:29.147610Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:29.698502Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:29.746442Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:29.783045Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:29.885766Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:30.192602Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:30.281788Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.516249Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708569010711523:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.516394Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.520243Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708569010711533:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.520360Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.587885Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.638948Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.666175Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708547535873306:2165];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:33.666246Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:33.689370Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.725717Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.761959Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.798156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.835060Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.888344Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:33.974363Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708569010712406:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.974503Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.974526Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708569010712411:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.974989Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708569010712413:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.975059Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:33.979036Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:33.990005Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708569010712414:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:34.058295Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708573305679762:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:36.084826Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.237909Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTI5ZjlmYTAtY2U0MWU2MC1hZDExNTUxMS04MDUwMzc4Mg==, ActorId: [5:7593708581895614676:2535], ActorState: ExecuteState, LegacyTraceId: 01keky0sbff2rvswy8z3298sj2, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202524 > 50331648)" issue_code: 2013 severity: 1 } trace_id# |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TestShred::ShredManualLaunch >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TSubDomainTest::ConsistentCopyTable >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> KqpStats::RequestUnitForExecute [GOOD] >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> TOlapNaming::AlterColumnTableFailed >> TOlap::CreateStore >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> TOlap::CreateDropStandaloneTable >> TOlap::CreateStoreWithDirs >> TOlap::StoreStatsQuota >> TOlapNaming::CreateColumnTableOk >> KqpStats::OneShardNonLocalExec+UseSink [FAIL] >> KqpStats::OneShardNonLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2026-01-10T12:27:41.717219Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:41.717369Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:41.739806Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:41.739987Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:41.776970Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:41.777692Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4315332366871329343, session=0, seqNo=0) 
2026-01-10T12:27:41.777921Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:41.790524Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4315332366871329343, session=1) 2026-01-10T12:27:41.790886Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=12895522721320821425, session=0, seqNo=0) 2026-01-10T12:27:41.791021Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:41.803396Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=12895522721320821425, session=2) 2026-01-10T12:27:41.803776Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-10T12:27:41.804009Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:41.804137Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:41.816776Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:41.817243Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:41.817597Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:41.817725Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2026-01-10T12:27:41.830538Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-10T12:27:41.830628Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=333) 2026-01-10T12:27:41.831316Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=10394698391126443873, name="Lock1") 2026-01-10T12:27:41.831452Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=10394698391126443873) 2026-01-10T12:27:42.347683Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:42.347791Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:42.366478Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:42.367096Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:42.401772Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:42.402221Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[2:136:2161], cookie=17376999288007466218, session=0, seqNo=0) 2026-01-10T12:27:42.402334Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:42.414183Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=17376999288007466218, session=1) 2026-01-10T12:27:42.414451Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=13083709999907460801, session=0, seqNo=0) 2026-01-10T12:27:42.414554Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:42.427534Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=13083709999907460801, session=2) 2026-01-10T12:27:42.427940Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:42.428135Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:42.428239Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:42.440783Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-10T12:27:42.441220Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:42.441579Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:42.454001Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=222) 2026-01-10T12:27:42.454095Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=333) 2026-01-10T12:27:42.454742Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:153:2175], cookie=3112323759879668015, name="Lock1") 2026-01-10T12:27:42.454842Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:153:2175], cookie=3112323759879668015) 2026-01-10T12:27:42.455310Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=3093989048754035238, name="Lock1") 2026-01-10T12:27:42.455384Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=3093989048754035238) 2026-01-10T12:27:42.746647Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:42.746764Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:42.767740Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:42.767886Z node 3 
:KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:42.802969Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:42.803526Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=585408749038241925, session=0, seqNo=0) 2026-01-10T12:27:42.803674Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:42.816419Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=585408749038241925, session=1) 2026-01-10T12:27:42.816825Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=10257325468064516772, session=0, seqNo=0) 2026-01-10T12:27:42.816980Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:42.829469Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=10257325468064516772, session=2) 2026-01-10T12:27:42.830268Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:42.830436Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:42.830535Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:42.843122Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-10T12:27:42.843518Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:42.843803Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:42.843875Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-10T12:27:42.855636Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=222) 2026-01-10T12:27:42.855703Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=333) 2026-01-10T12:27:42.856318Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:156:2178], cookie=12391888820555527966, name="Lock1") 2026-01-10T12:27:42.856414Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:156:2178], cookie=12391888820555527966) 2026-01-10T12:27:42.856821Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:159:2181], cookie=8848901231749239206, name="Lock1") 2026-01-10T12:27:42.856891Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[3:159:2181], cookie=8848901231749239206) 2026-01-10T12:27:42.869359Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:42.869471Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:42.869935Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:42.870549Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:42.918542Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:42.918717Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:42.919182Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:198:2211], cookie=779394800486008991, name="Lock1") 2026-01-10T12:27:42.919282Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:198:2211], cookie=779394800486008991) 2026-01-10T12:27:42.919862Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2218], cookie=12508163384750139825, name="Lock1") 2026-01-10T12:27:42.919937Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2218], cookie=12508163384750139825) 2026-01-10T12:27:43.371471Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:43.371584Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:43.385758Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:43.385886Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:43.400031Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:43.400693Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=7782624027017012884, session=0, seqNo=0) 2026-01-10T12:27:43.400864Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:43.423972Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=7782624027017012884, session=1) 2026-01-10T12:27:43.424382Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=11998258518424178206, session=0, seqNo=0) 2026-01-10T12:27:43.424532Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:43.436905Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=11998258518424178206, session=2) 2026-01-10T12:27:43.437268Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:43.437431Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 
2026-01-10T12:27:43.437543Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:43.450031Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-10T12:27:43.450443Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:43.450865Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Lock1") 2026-01-10T12:27:43.450995Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-10T12:27:43.463396Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-10T12:27:43.463504Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-10T12:27:43.814173Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:43.814269Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:43.828031Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:43.828365Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:43.857122Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:43.862776Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2161], cookie=10757658536401478781, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-10T12:27:43.863020Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:43.875179Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2161], cookie=10757658536401478781) 2026-01-10T12:27:43.875756Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=15149924350736753442, path="/Root/Res", config={ }) 2026-01-10T12:27:43.876016Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-10T12:27:43.888508Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=15149924350736753442) 2026-01-10T12:27:43.890288Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 6999197189836951340. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:43.890359Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=6999197189836951340) 2026-01-10T12:27:43.890880Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 7624691009388713891. Data: { } 2026-01-10T12:27:43.890928Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=7624691009388713891) 2026-01-10T12:27:43.932871Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.985104Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:44.016275Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:44.057939Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:44.099515Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2026-01-10T12:27:39.885545Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.885678Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.933009Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.933248Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.971157Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.971579Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:136:2161], cookie=6546804989163967802, path="/foo/bar/baz") 2026-01-10T12:27:39.985776Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:136:2161], cookie=6546804989163967802, status=SUCCESS) 2026-01-10T12:27:39.986451Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:146:2168], cookie=6354988289930685733) 2026-01-10T12:27:40.002882Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:146:2168], cookie=6354988289930685733) 2026-01-10T12:27:40.003525Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:151:2173], cookie=5168910852115989135, path="/foo/bar/baz") 2026-01-10T12:27:40.016824Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:151:2173], cookie=5168910852115989135, status=SUCCESS) 2026-01-10T12:27:40.017455Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:156:2178], cookie=3448282759905127881) 2026-01-10T12:27:40.031429Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:156:2178], cookie=3448282759905127881) 2026-01-10T12:27:40.046946Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.047060Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.047532Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.047848Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.089841Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.090244Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:197:2210], cookie=5358733100704921573) 2026-01-10T12:27:40.102638Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:197:2210], cookie=5358733100704921573) 2026-01-10T12:27:40.103341Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:205:2217], cookie=5314253955451240184, path="/foo/bar/baz") 2026-01-10T12:27:40.115822Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete 
(sender=[1:205:2217], cookie=5314253955451240184, status=SUCCESS) 2026-01-10T12:27:40.116571Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:210:2222], cookie=15251351802055970116, path="/foo/bar/baz") 2026-01-10T12:27:40.116664Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:210:2222], cookie=15251351802055970116, status=PRECONDITION_FAILED) 2026-01-10T12:27:40.521448Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.521530Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.535643Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.536017Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.572805Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.573218Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:136:2161], cookie=8607919137899434837, name="Lock1") 2026-01-10T12:27:40.573305Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:136:2161], cookie=8607919137899434837) 2026-01-10T12:27:40.934958Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.935075Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.953983Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.954102Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.992996Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.993627Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=17413519719942977057, session=0, seqNo=0) 2026-01-10T12:27:40.993809Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:41.012043Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=17413519719942977057, session=1) 2026-01-10T12:27:41.012513Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:41.012703Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:41.012820Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:41.029063Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-10T12:27:41.029702Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2170], cookie=9911802956267242885, name="Lock1", force=0) 2026-01-10T12:27:41.042179Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] 
TTxSemaphoreDelete::Complete (sender=[3:148:2170], cookie=9911802956267242885) 2026-01-10T12:27:41.042792Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:153:2175], cookie=3965024750158581519, name="Sem1", force=0) 2026-01-10T12:27:41.057251Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:153:2175], cookie=3965024750158581519) 2026-01-10T12:27:41.057919Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:158:2180], cookie=14564208457644000083, name="Sem1", limit=42) 2026-01-10T12:27:41.058093Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-10T12:27:41.072896Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:158:2180], cookie=14564208457644000083) 2026-01-10T12:27:41.073543Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2185], cookie=6068430616133832343, name="Sem1", force=0) 2026-01-10T12:27:41.073650Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2026-01-10T12:27:41.090568Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2185], cookie=6068430616133832343) 2026-01-10T12:27:41.091238Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:168:2190], cookie=11542765369693778678, name="Sem1", force=0) 2026-01-10T12:27:41.105351Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:168:2190], cookie=11542765369693778678) 2026-01-10T12:27:41.572031Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:41.572194Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:41.595027Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:41.595187Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:41.617677Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:41.618345Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=8880571872099442364, session=0, seqNo=0) 2026-01-10T12:27:41.618518Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:41.644950Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=8880571872099442364, session=1) 2026-01-10T12:27:41.645320Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=3908731137712591157, session=0, seqNo=0) 2026-01-10T12:27:41.645487Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:41.657866Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=3908731137712591157, session=2) 2026-01-10T12:27:41.658198Z node 4 :KESUS_TABLET DEBUG: 
tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:137:2161], cookie=5000267461933324769 2026-01-10T12:27:41.658818Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:149:2171], cookie=10803683500937545555, name="Sem1", limit=3) 2026-01-10T12:27:41.658989Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:27:41.671577Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:149:2171], cookie=10803683500937545555) 2026-01-10T12:27:41.671948Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=112, name="Sem1") 2026-01-10T12:27:41.672097Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=112) 2026-01-10T12:27:41.672367Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=113, name="Sem1") 2026-01-10T12:27:41.672445Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=113) 2026-01-10T12:27:41.672679Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=14377166848913767250, session=2, seqNo=0) 2026-01-10T12:27:41.687141Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessi ... e 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.101819Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.112470Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=129, session=1, semaphore="Sem2" count=2) 2026-01-10T12:27:43.125617Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=129) 2026-01-10T12:27:43.126070Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=130, name="Sem2") 2026-01-10T12:27:43.126164Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=130) 2026-01-10T12:27:43.126476Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=131, session=1, semaphore="Sem2" count=1) 2026-01-10T12:27:43.138748Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=131) 2026-01-10T12:27:43.139162Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=132, name="Sem2") 2026-01-10T12:27:43.139246Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=132) 2026-01-10T12:27:43.139532Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=133, name="Sem2") 
2026-01-10T12:27:43.139599Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=133) 2026-01-10T12:27:43.606855Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:43.606985Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:43.628248Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:43.628654Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:43.653822Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:43.663292Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2161], cookie=15805942704157517357, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2026-01-10T12:27:43.663608Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2026-01-10T12:27:43.675987Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2161], cookie=15805942704157517357) 2026-01-10T12:27:43.676696Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=7574172042382416096, path="/Root1/Res", config={ }) 2026-01-10T12:27:43.676983Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2026-01-10T12:27:43.693011Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=7574172042382416096) 2026-01-10T12:27:43.693806Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2173], cookie=6716288723135618062, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2026-01-10T12:27:43.694031Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2026-01-10T12:27:43.706554Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2173], cookie=6716288723135618062) 2026-01-10T12:27:43.707238Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2178], cookie=12296085851280351432, path="/Root2/Res", config={ }) 2026-01-10T12:27:43.707498Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2026-01-10T12:27:43.721737Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2178], cookie=12296085851280351432) 2026-01-10T12:27:43.722478Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:161:2183], cookie=117729396619798019, path="/Root2/Res/Subres", config={ }) 2026-01-10T12:27:43.722768Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2026-01-10T12:27:43.735293Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:161:2183], 
cookie=117729396619798019) 2026-01-10T12:27:43.736715Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 11423773264702629976. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:43.736790Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=11423773264702629976) 2026-01-10T12:27:43.778844Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.830911Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.861905Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.862474Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2192]. Cookie: 13476781549359616908. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-10T12:27:43.863093Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 14344696523599553742. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:43.863150Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=14344696523599553742) 2026-01-10T12:27:43.904839Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.946435Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.947139Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2199]. Cookie: 12721606830570900121. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-10T12:27:43.947917Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 1077699060850910136. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:43.947972Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=1077699060850910136) 2026-01-10T12:27:43.948642Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 5106869041617577057. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:43.948719Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=5106869041617577057) 2026-01-10T12:27:43.979874Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.980069Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:43.980833Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:190:2206]. Cookie: 11590783076237523198. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> KqpQuery::CreateTableAs_MkDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2026-01-10T12:27:26.632198Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708538980233809:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.632334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:26.676034Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708535613681290:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:26.693060Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:27.033728Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.035545Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.035441Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:27:27.195678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.287231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:27.287381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:27.290785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:27.290854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:27.312852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:27.312965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:27.322443Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-10T12:27:27.322622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:27.327908Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:27:27.329078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:27.329238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:27.435941Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.614883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:27.644236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:27.647731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.677646Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:27.720235Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:27.730185Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:27.761256Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:28.064634Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593708538980233936:2146] Handle TEvNavigate describe path dc-1 2026-01-10T12:27:28.064691Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593708547570169028:2474] HANDLE EvNavigateScheme dc-1 2026-01-10T12:27:28.064794Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708538980233946:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:28.064874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593708543275201453:2289][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593708538980233946:2149], cookie# 1 2026-01-10T12:27:28.066541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708543275201459:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201455:2289], cookie# 1 2026-01-10T12:27:28.066569Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708543275201460:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201456:2289], cookie# 1 2026-01-10T12:27:28.066589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708543275201461:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201457:2289], cookie# 1 2026-01-10T12:27:28.066632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708538980233579:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201459:2289], cookie# 1 2026-01-10T12:27:28.066663Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708538980233582:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201460:2289], cookie# 1 2026-01-10T12:27:28.066689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708538980233585:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708543275201461:2289], cookie# 1 2026-01-10T12:27:28.066739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708543275201459:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708538980233579:2053], cookie# 1 2026-01-10T12:27:28.066757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708543275201460:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708538980233582:2056], cookie# 1 2026-01-10T12:27:28.066792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708543275201461:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708538980233585:2059], cookie# 1 2026-01-10T12:27:28.066840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708543275201453:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708543275201455:2289], cookie# 1 2026-01-10T12:27:28.066863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593708543275201453:2289][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:27:28.066909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708543275201453:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708543275201456:2289], cookie# 1 2026-01-10T12:27:28.066943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593708543275201453:2289][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:27:28.066971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708543275201453:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708543275201457:2289], cookie# 1 2026-01-10T12:27:28.066989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593708543275201453:2289][/dc-1] Sync cookie mismatch: sender# [1:7593708543275201457:2289], cookie# 1, current cookie# 0 2026-01-10T12:27:28.067041Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# 
[1:7593708538980233946:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:27:28.093289Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593708538980233946:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593708543275201453:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:28.093431Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593708538980233946:2149], cacheItem# { Subscriber: { Subscriber: [1:7593708543275201453:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:27:28.102219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593708547570169029:2475], recipient# [1:7593708547570169028:2474], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 7205759404 ... 
Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[7:24339059:0], [7:1099535966835:0], [7:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-10T12:27:41.937653Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7593708590310493780:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [7:7593708603195396992:2855] 2026-01-10T12:27:41.937674Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7593708590310493780:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2026-01-10T12:27:41.937744Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7593708590310493780:2050] Subscribe: subscriber# [7:7593708603195396992:2855], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:27:41.937808Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7593708590310493783:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [7:7593708603195396993:2855] 2026-01-10T12:27:41.937817Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7593708590310493783:2053] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2026-01-10T12:27:41.937840Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7593708590310493783:2053] Subscribe: subscriber# [7:7593708603195396993:2855], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:27:41.937886Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7593708590310493786:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [7:7593708603195396994:2855] 2026-01-10T12:27:41.937911Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7593708590310493786:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2026-01-10T12:27:41.937936Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7593708590310493786:2056] Subscribe: subscriber# [7:7593708603195396994:2855], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:27:41.937980Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7593708603195396992:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708590310493780:2050] 2026-01-10T12:27:41.938007Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7593708603195396993:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708590310493783:2053] 2026-01-10T12:27:41.938037Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7593708603195396994:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708590310493786:2056] 2026-01-10T12:27:41.938114Z node 7 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708603195396989:2855] 2026-01-10T12:27:41.938205Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708603195396990:2855] 2026-01-10T12:27:41.938253Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [7:7593708590310494091:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:41.938294Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7593708603195396991:2855] 2026-01-10T12:27:41.938322Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][7:7593708603195396980:2855][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [7:7593708590310494091:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:41.938346Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7593708590310493780:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7593708603195396992:2855] 2026-01-10T12:27:41.938361Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7593708590310493783:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7593708603195396993:2855] 2026-01-10T12:27:41.938389Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7593708590310493786:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7593708603195396994:2855] 2026-01-10T12:27:41.938433Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7593708590310494091:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-10T12:27:41.938528Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7593708590310494091:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7593708603195396980:2855] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:41.938607Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7593708590310494091:2118], cacheItem# { Subscriber: { Subscriber: [7:7593708603195396980:2855] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:41.938717Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7593708603195396995:2858], recipient# [7:7593708603195396970:2309], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:42.496300Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7593708590310494091:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:42.496469Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7593708590310494091:2118], cacheItem# { Subscriber: { Subscriber: [7:7593708594605462172:2705] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:42.496571Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7593708607490364296:2862], recipient# [7:7593708607490364295:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:42.937543Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7593708590310494091:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:42.937650Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7593708590310494091:2118], cacheItem# { 
Subscriber: { Subscriber: [7:7593708603195396972:2854] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:42.937752Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7593708607490364304:2863], recipient# [7:7593708607490364303:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 27926, MsgBus: 10069 2026-01-10T12:27:14.248116Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708485352707988:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:14.248169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:14.299490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:14.605640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:14.605743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:14.698652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:14.704176Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:14.705311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:14.734699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708485352707921:2081] 1768048034223802 != 1768048034223805 2026-01-10T12:27:14.831885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:14.831906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:14.831912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:14.832002Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:14.894221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:15.263726Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:15.299752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:15.308243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:15.352905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.566668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.746826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.824879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:17.775133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708498237611688:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.775251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.775681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708498237611698:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:17.775716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.067756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.106255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.142958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.171661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.198177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.238324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.281347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.326363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.429612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708502532579882:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.429668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.429768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708502532579887:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.429999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708502532579889:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.430046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.433693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:18.451120Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708502532579891:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:18.506228Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708502532579942:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:19.251365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708485352707988:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:19.251437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... ts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61163, MsgBus: 65292 2026-01-10T12:27:36.272744Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708580575905228:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:36.272799Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:36.283781Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:36.371120Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:36.372508Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:36.372598Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:36.372654Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708580575905199:2081] 1768048056271870 != 1768048056271873 2026-01-10T12:27:36.414220Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:36.437820Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:36.482945Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:36.482974Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:36.482982Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:36.483081Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:37.054232Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:37.062724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:37.068934Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:37.175535Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:37.319178Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:37.383938Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:37.478400Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.187129Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708597755776252:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.187235Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.187503Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708597755776262:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.187555Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.294555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.331355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.362202Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.405932Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.446567Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.501308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.549760Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.604141Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.697764Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708597755777138:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.697917Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.697972Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708597755777143:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.698180Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708597755777145:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.698233Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:40.702155Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:40.712116Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708597755777146:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:40.802244Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708597755777198:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:41.272964Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708580575905228:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:41.273036Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 334 Consumed units: 6 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-10T12:27:23.076163Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.006835s 2026-01-10T12:27:23.106641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708523575866896:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:23.115504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:23.383131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:23.607890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:23.608115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:23.623810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:23.637767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:23.641981Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:23.908265Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593708523575867112:2123] Handle TEvNavigate describe path dc-1 2026-01-10T12:27:23.908322Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593708523575867607:2449] HANDLE EvNavigateScheme dc-1 2026-01-10T12:27:23.908412Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708523575867121:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:23.908488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593708523575867314:2245][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593708523575867121:2126], cookie# 1 2026-01-10T12:27:23.909797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708523575867323:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867320:2245], cookie# 1 2026-01-10T12:27:23.909840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708523575867324:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867321:2245], cookie# 1 2026-01-10T12:27:23.909950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708519280899495:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867323:2245], cookie# 1 2026-01-10T12:27:23.909989Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708519280899498:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867324:2245], cookie# 1 2026-01-10T12:27:23.910029Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708523575867325:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867322:2245], cookie# 1 2026-01-10T12:27:23.910067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708523575867323:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708519280899495:2051], cookie# 1 2026-01-10T12:27:23.910080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708523575867324:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708519280899498:2054], cookie# 1 2026-01-10T12:27:23.910106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708523575867314:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708523575867320:2245], cookie# 1 2026-01-10T12:27:23.910138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593708523575867314:2245][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:27:23.910182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708523575867314:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708523575867321:2245], cookie# 1 2026-01-10T12:27:23.910214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593708523575867314:2245][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:27:23.910261Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708519280899501:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708523575867325:2245], cookie# 1 2026-01-10T12:27:23.910310Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593708523575867121:2126], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:27:23.913260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708523575867325:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708519280899501:2057], cookie# 1 2026-01-10T12:27:23.913299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708523575867314:2245][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708523575867322:2245], cookie# 1 2026-01-10T12:27:23.913317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593708523575867314:2245][/dc-1] Sync cookie mismatch: sender# [1:7593708523575867322:2245], cookie# 1, current cookie# 0 2026-01-10T12:27:23.919915Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593708523575867121:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593708523575867314:2245] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:23.920103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593708523575867121:2126], cacheItem# { Subscriber: { Subscriber: [1:7593708523575867314:2245] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:27:23.926824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593708523575867608:2450], recipient# [1:7593708523575867607:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:27:23.926964Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593708523575867607:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:27:23.975502Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593708523575867607:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:27:23.979725Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593708523575867607:2449] Handle TEvDescribeSchemeResult Forward to# 
[1:7593708523575867606:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:27:23.986292Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593708523575867112:2123] Handle TEvProposeTransaction 2026-01-10T12:27:23.986323Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593708523575867112:2123] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:27:23.987326Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593708523575867112:2123] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593708523575867617:2458] 2026-01-10T12:27:24.115974Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708523575867121:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true Sh ... 
1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:42.964912Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [6:7593708593853777069:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [6:7593708606738680172:2763] 2026-01-10T12:27:42.964919Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7593708593853777398:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-10T12:27:42.964925Z node 6 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [6:7593708593853777069:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:27:42.964972Z node 6 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [6:7593708593853777069:2053] Subscribe: subscriber# [6:7593708606738680172:2763], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:27:42.964991Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7593708593853777398:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7593708606738680153:2761] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:42.965007Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7593708593853777069:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7593708606738680165:2762] 2026-01-10T12:27:42.965033Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [6:7593708593853777072:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [6:7593708606738680173:2763] 2026-01-10T12:27:42.965045Z node 6 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [6:7593708593853777072:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-10T12:27:42.965060Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593708593853777398:2130], cacheItem# { Subscriber: { Subscriber: [6:7593708606738680153:2761] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:42.965074Z node 6 
:SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [6:7593708593853777072:2056] Subscribe: subscriber# [6:7593708606738680173:2763], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-10T12:27:42.965107Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7593708606738680171:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708593853777066:2050] 2026-01-10T12:27:42.965131Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7593708606738680172:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708593853777069:2053] 2026-01-10T12:27:42.965153Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][6:7593708606738680173:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708593853777072:2056] 2026-01-10T12:27:42.965191Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7593708606738680155:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708606738680168:2763] 2026-01-10T12:27:42.965194Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593708606738680174:2764], recipient# [6:7593708606738680148:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:42.965221Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7593708606738680155:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708606738680169:2763] 2026-01-10T12:27:42.965249Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][6:7593708606738680155:2763][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [6:7593708593853777398:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:42.965255Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7593708593853777066:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7593708606738680171:2763] 2026-01-10T12:27:42.965274Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7593708606738680155:2763][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7593708606738680170:2763] 2026-01-10T12:27:42.965280Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7593708593853777069:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7593708606738680172:2763] 2026-01-10T12:27:42.965301Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: 
[6:7593708593853777072:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7593708606738680173:2763] 2026-01-10T12:27:42.965305Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7593708606738680155:2763][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7593708593853777398:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:42.965353Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7593708593853777398:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-10T12:27:42.965482Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7593708593853777398:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7593708606738680155:2763] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:42.965580Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593708593853777398:2130], cacheItem# { Subscriber: { Subscriber: [6:7593708606738680155:2763] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:42.965725Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593708606738680175:2765], recipient# [6:7593708606738680150:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:43.142570Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7593708593853777398:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:43.142737Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7593708593853777398:2130], 
cacheItem# { Subscriber: { Subscriber: [6:7593708598148745503:2746] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:43.142845Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7593708611033647473:2766], recipient# [6:7593708611033647472:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] Test command err: Trying to start YDB, gRPC: 3192, MsgBus: 17721 2026-01-10T12:26:55.048667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708403574307319:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:55.048724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:55.343743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:55.343840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:55.358308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:55.421568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:55.424178Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:55.518852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:55.518871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:55.518877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:55.519201Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:55.694132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:55.958161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:56.064211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:58.329316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708416459210014:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.329512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.329961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708416459210051:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.330005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708416459210052:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.330141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:58.334467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:58.346198Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708416459210055:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:26:58.478987Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708416459210106:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:58.775404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.293409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-10T12:26:59.309926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.562784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:59.574534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:26:59.582502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2026-01-10T12:26:59.583612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 61584, MsgBus: 30059 2026-01-10T12:27:00.597359Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708423659278899:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:00.597750Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:00.636156Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:00.723498Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[2:7593708423659278850:2081] 1768048020577785 != 1768048020577788 2026-01-10T12:27:00.730889Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:00.738576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:00.738649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:00.763255Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:00.819466Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:00.972765Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:00.972796Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:00.972811Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:00.972916Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:01.565626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:01.577197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:27:01.598079Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:04.220834Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708440839148899:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.220833Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708440839148926:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.220916Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.221174Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708440839148930:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:04.221225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, ... 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708574999598272:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:35.458763Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:35.501838Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:35.568324Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708553524760878:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:35.568409Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:35.998058Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.218250Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:36.223649Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-10T12:27:36.225003Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:36.631499Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708579294566246:2940] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:36.659251Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:37.557605Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:37.568011Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710678, at schemeshard: 72057594046644480 2026-01-10T12:27:37.569293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 31882, MsgBus: 5230 2026-01-10T12:27:39.054372Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708591920212112:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:39.068885Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:39.079889Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:39.195788Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:39.208199Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708591920211940:2081] 1768048059010810 != 1768048059010813 2026-01-10T12:27:39.211817Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:39.211921Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:39.246665Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:39.360183Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:39.380792Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:39.380828Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:39.380840Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:39.380938Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:39.952980Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:39.960785Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:40.053999Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:43.733141Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708609100082012:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.733170Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708609100082020:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.733253Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.733616Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708609100082027:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.733719Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.737121Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:43.750204Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708609100082026:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:27:43.835495Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708609100082079:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:43.929814Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:44.043007Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708591920212112:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:44.043108Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:44.120337Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:44.129955Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:27:44.137962Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpQuery::QueryFromSqs [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> 
KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> KqpExplain::CreateTableAs-Stats [GOOD] >> KqpBatchDelete::ManyPartitions_2 [GOOD] >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink >> TestShred::ShredManualLaunch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 20570, MsgBus: 5012 2026-01-10T12:27:15.388223Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708488534459012:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:15.388458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:15.736982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:15.741999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:15.742142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:15.744567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:15.826896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:15.829396Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708488534458973:2081] 1768048035385937 != 1768048035385940 2026-01-10T12:27:15.930052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:15.930096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:15.930113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:15.930223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:16.013450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:16.374237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:16.383671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:16.399111Z 
node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:16.447264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.629557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.814519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:16.902155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.797848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708501419362737:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.797998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.798464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708501419362747:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:18.798518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.124182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.153041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.180416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.208824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.237796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.278753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.315282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.365988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:19.454169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708505714330915:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.454276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.454729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708505714330920:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.454743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708505714330921:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.454786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:19.458540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:19.469566Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708505714330924:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:19.590993Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708505714330975:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:20.391436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708488534459012:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:20.391508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:21.373976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... B, gRPC: 22088, MsgBus: 31808 2026-01-10T12:27:39.165420Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708591694391668:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:39.181125Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:39.186014Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:39.296214Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:39.297455Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708591694391596:2081] 1768048059131799 != 1768048059131802 2026-01-10T12:27:39.315770Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:39.315883Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:39.359919Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:39.468389Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:39.468421Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:39.468431Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:39.468520Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:39.476221Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:40.024698Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:40.045118Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.136441Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.270478Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:40.327391Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:40.411845Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.471663Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708608874262656:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.471759Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.472013Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708608874262665:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.472055Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.543240Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.574956Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.607376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.640801Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.675475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.712959Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.754512Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.808671Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:43.903042Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708608874263537:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.903132Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708608874263542:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.903173Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.903470Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593708608874263545:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.903574Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:43.906967Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:43.917725Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593708608874263544:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:44.016471Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593708613169230893:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:44.158712Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708591694391668:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:44.158793Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:45.957960Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2026-01-10T12:27:39.625747Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.625898Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.647261Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.647445Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.683505Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.696897Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=5194538116137815112, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:27:39.697923Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:39.721219Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=5194538116137815112) 2026-01-10T12:27:39.721987Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=1654637453948447370, path="/Root/Res", config={ }) 2026-01-10T12:27:39.722249Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-10T12:27:39.737001Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=1654637453948447370) 2026-01-10T12:27:39.745249Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:152:2174]. Cookie: 15237867819989458621. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:39.745350Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:152:2174], cookie=15237867819989458621) 2026-01-10T12:27:39.748312Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:152:2174]. Cookie: 11923555155835883272. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2026-01-10T12:27:39.748393Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:152:2174], cookie=11923555155835883272) 2026-01-10T12:27:42.040117Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:42.040277Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:42.069308Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:42.070076Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:42.107474Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:42.108129Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:136:2161], cookie=7338662080931010187, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:27:42.108480Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:42.121114Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:136:2161], cookie=7338662080931010187) 2026-01-10T12:27:42.122262Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 6255630409391007085. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:42.122334Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=6255630409391007085) 2026-01-10T12:27:42.123193Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 8755144198474990608. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:42.123290Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=8755144198474990608) 2026-01-10T12:27:42.123932Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 4935748965716974059. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-10T12:27:42.124010Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=4935748965716974059) 2026-01-10T12:27:42.124701Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 2184159803734558228. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-10T12:27:42.124773Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=2184159803734558228) 2026-01-10T12:27:44.307851Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:44.308010Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:44.326235Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:44.326386Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:44.361915Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:44.362602Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=18416686998142529574, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-10T12:27:44.362975Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:44.375322Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=18416686998142529574) 2026-01-10T12:27:44.376015Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=13319939660624010079, path="/Root/Res1", config={ }) 2026-01-10T12:27:44.376329Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2026-01-10T12:27:44.388836Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=13319939660624010079) 2026-01-10T12:27:44.389534Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:152:2174], cookie=4526309842488639515, path="/Root/Res2", config={ }) 2026-01-10T12:27:44.389803Z 
node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2026-01-10T12:27:44.402366Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:152:2174], cookie=4526309842488639515) 2026-01-10T12:27:44.403443Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 11126022115860260157. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:44.403519Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=11126022115860260157) 2026-01-10T12:27:44.404314Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 8876015084694990875. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:44.404382Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=8876015084694990875) 2026-01-10T12:27:44.404984Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:157:2179]. Cookie: 15253741754224341552. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2026-01-10T12:27:44.405041Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:157:2179], cookie=15253741754224341552) 2026-01-10T12:27:46.587660Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:46.587810Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:46.609245Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:46.609374Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:46.628576Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:46.629123Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=18072510780360159302, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-10T12:27:46.629497Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:46.652307Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=18072510780360159302) 2026-01-10T12:27:46.653530Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 9281999031636609551. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:46.653618Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=9281999031636609551) 2026-01-10T12:27:46.654230Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 14334465103215022544. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2026-01-10T12:27:46.654291Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=14334465103215022544) 2026-01-10T12:27:48.865191Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:48.865300Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:48.893150Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:48.893534Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:48.922834Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:48.923426Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2161], cookie=8417766880977218825, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-10T12:27:48.923688Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:27:48.936145Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2161], cookie=8417766880977218825) 2026-01-10T12:27:48.936739Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=10919642102802958160, path="/Root/Res", config={ }) 2026-01-10T12:27:48.936990Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-10T12:27:48.949243Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=10919642102802958160) 2026-01-10T12:27:48.950053Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 17114109265203612003. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:27:48.950110Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=17114109265203612003) 2026-01-10T12:27:48.950636Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:155:2177], cookie=11148289040326202899, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2026-01-10T12:27:48.950827Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2026-01-10T12:27:48.951043Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2026-01-10T12:27:48.963810Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:155:2177], cookie=11148289040326202899) 2026-01-10T12:27:48.964502Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 13705670892290490717. Data: { } 2026-01-10T12:27:48.964574Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=13705670892290490717) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TOlap::CreateTableTtl [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:43.698443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:43.698515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:43.698544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:43.698575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:43.698602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:43.698622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-10T12:27:43.698689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:43.698738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:43.699491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:43.699757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:43.779802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:43.779877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:43.790657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:43.790745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:43.790884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:43.798396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:43.798658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:43.799438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:43.799687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:43.802048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:43.802188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:43.803382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:43.803447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:43.803514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:43.803556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:43.803598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:43.803889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:43.938257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.939939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:43.940452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553241, Sender [1:1086:3004], Recipient [1:871:2822]: NKikimrTxDataShard.TEvVacuumResult VacuumGeneration: 1 TabletId: 72075186233409550 Status: OK 2026-01-10T12:27:47.357354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5465: StateWork, processing event TEvDataShard::TEvVacuumResult 2026-01-10T12:27:47.357423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:546: TTxCompleteShredShard Execute at schemestard: 72075186233409546 2026-01-10T12:27:47.357507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__tenant_shred_manager.cpp:309: [TenantShredManager] [Finished] Shred is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 32], tabletId# 72075186233409550, shardIdx# 72075186233409546:5 in# 85 ms, next wakeup in# 14.915000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2026-01-10T12:27:47.357592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:326: [TenantShredManager] Shred in shards is completed. Send response to root schemeshard 2026-01-10T12:27:47.357622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:349: [TenantShredManager] Complete: Generation# 1 2026-01-10T12:27:47.360239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2026-01-10T12:27:47.360634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3061:4674], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:27:47.360684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:27:47.360716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:27:47.360931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125514, Sender [1:871:2822], Recipient [1:702:2686]: NKikimrScheme.TEvTenantShredResponse PathId { OwnerId: 72057594046678944 LocalId: 38 } Generation: 1 Status: COMPLETED 2026-01-10T12:27:47.360967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvTenantShredResponse 2026-01-10T12:27:47.361038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2026-01-10T12:27:47.361095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 86 ms, next wakeup# 599.914000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-10T12:27:47.361177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2026-01-10T12:27:47.361443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3060:4673], Recipient [1:871:2822]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:3061:4674] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-10T12:27:47.361478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:27:47.361509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2026-01-10T12:27:47.363648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-10T12:27:47.363724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:27:47.364251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3065:4678], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3066:4679] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:27:47.364297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-10T12:27:47.364331Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-10T12:27:47.364487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-10T12:27:47.364520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:27:47.364564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:27:47.364641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:27:47.364683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-10T12:27:47.364751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:27:47.364813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:27:48.440679Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:27:48.440764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:27:48.440831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 
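The TEvControllerShredResponse entries above report shred progress as Progress10k, which the schemeshard maps to a percentage (0 -> "0%", 5000 -> "50%", 10000 -> "completed"), i.e. units of 0.01%. As a minimal illustration only, assuming a copy of this log has been saved to a local file named shred_test.log (a hypothetical filename, not something the test produces), the progress values can be pulled out like this:

    # Minimal sketch: extract shred progress from a saved copy of this log.
    # Assumes the log text is stored in "shred_test.log" (hypothetical filename).
    import re

    PROGRESS_RE = re.compile(r"TEvControllerShredResponse.*?Progress10k:\s*(\d+)")

    with open("shred_test.log", encoding="utf-8", errors="replace") as f:
        log_text = f.read()

    for raw in PROGRESS_RE.findall(log_text):
        progress10k = int(raw)
        # Progress10k is in units of 0.01%: 5000 corresponds to the "50%" reported above.
        print(f"shred progress in BSC: {progress10k / 100:.0f}%")
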
2026-01-10T12:27:48.441014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:27:48.441055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:27:48.441214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-10T12:27:48.441247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:27:48.441285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:27:48.441363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:27:48.441409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-10T12:27:48.441472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-10T12:27:48.441543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-10T12:27:49.096814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:27:49.096916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-10T12:27:49.097070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:27:49.097102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-10T12:27:49.097132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-10T12:27:49.097275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:702:2686], Recipient [1:702:2686]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:27:49.097325Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-10T12:27:49.097513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:708:2690], Recipient [1:702:2686]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-10T12:27:49.097548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-10T12:27:49.097575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-10T12:27:49.097641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-10T12:27:49.097673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-10T12:27:49.097716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-10T12:27:49.100270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-10T12:27:49.100981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3115:4728], Recipient [1:702:2686]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:27:49.101042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:27:49.101082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-10T12:27:49.101248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:685:2675], Recipient [1:702:2686]: NKikimrScheme.TEvShredInfoRequest 2026-01-10T12:27:49.101282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-10T12:27:49.101319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 16821, MsgBus: 15004 2026-01-10T12:27:11.456665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708472474962121:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:11.456838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:11.732775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:11.732888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:11.735025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:11.820918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:11.865839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:11.873423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708472474961901:2081] 1768048031382774 != 1768048031382777 2026-01-10T12:27:11.907367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:11.907395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:11.907403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:11.907537Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:11.950613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:12.373741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:12.435568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:12.455577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:12.601993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:12.774074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:12.869688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.013281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489654832953:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.013419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.014209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489654832963:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.014307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.348386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.422009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.496593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.536473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.587224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.640426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.692554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.762569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.853482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489654833838:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.853549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.853806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489654833843:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.853823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489654833844:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.853891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.858433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:15.874403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-10T12:27:15.876419Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708489654833847:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:15.962028Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708489654833898:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:16.455165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708472474962121:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:16.455232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Nam ... deId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2026-01-10T12:27:41.774982Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708579601264516:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:41.775066Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13473, MsgBus: 64853 2026-01-10T12:27:42.499367Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708604285371600:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:42.499453Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-10T12:27:42.520471Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:42.608233Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:42.635268Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:42.635384Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:42.656402Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:42.700169Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:42.708166Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:42.708196Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:42.708207Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:42.708319Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:43.201560Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:43.505653Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:46.925794Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708621465241637:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:46.925797Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708621465241645:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:46.925898Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:46.926234Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708621465241652:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:46.926328Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:46.930048Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:46.942205Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708621465241651:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:27:47.041868Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708625760209000:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:47.081165Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2026-01-10T12:27:47.499701Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708604285371600:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:47.499783Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, 
+∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::MixedCreateAsSelect >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.909166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.909282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.909347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.909395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.909440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.909475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.909530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.909611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.910588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.010596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.010665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.027665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.027807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.028031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.038368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.040519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.083011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.091305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.091544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.091669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.092758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-10T12:27:45.289527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.292016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000042 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2026-01-10T12:27:49.814378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:27:49.814791Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: 
TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:49.815270Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 38], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:27:49.815358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 0 2026-01-10T12:27:49.815403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 106:0 type: TxCreateColumnTable target path: [OwnerId: 72057594046678944, LocalPathId: 43] source path: 2026-01-10T12:27:49.815680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:27:49.816068Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:27:49.816204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:747) 2026-01-10T12:27:49.816361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-10T12:27:49.816453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:27:49.818957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 43, at schemeshard: 72057594046678944 2026-01-10T12:27:49.819289Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2026-01-10T12:27:49.819611Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:49.819666Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:27:49.819946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:27:49.820102Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:49.820165Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 106, path id: 
38 2026-01-10T12:27:49.820235Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 106, path id: 43 2026-01-10T12:27:49.820675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:49.820744Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:237: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2026-01-10T12:27:49.820946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:323: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2026-01-10T12:27:49.821840Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:49.821982Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:49.822055Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-10T12:27:49.822118Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-10T12:27:49.822170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-10T12:27:49.823831Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:49.823921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:49.823983Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-10T12:27:49.824022Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 1 2026-01-10T12:27:49.824055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-10T12:27:49.824160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-10T12:27:49.825939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 
275382272 2026-01-10T12:27:49.826078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2026-01-10T12:27:49.826745Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;self_id=[3:713:2699];ev=NActors::IEventHandle;tablet_id=72075186233409546;tx_id=106;this=136981566015232;method=TTxController::StartProposeOnExecute;tx_info=106:TX_KIND_SCHEMA;min=5000043;max=18446744073709551615;plan=0;src=[3:129:2154];cookie=12:5;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-10T12:27:49.827175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:27:49.827913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 5025, MsgBus: 28365 2026-01-10T12:25:18.998191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707987240262698:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:18.999062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:19.246538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:19.278393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:19.278531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:19.334287Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:19.352854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707987240262670:2081] 1768047918996707 != 1768047918996710 2026-01-10T12:25:19.354684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:19.432026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:19.432058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:19.432067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:19.432194Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:19.442775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:19.854963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:19.904963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:19.990802Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:20.051377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:20.216253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:20.288181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.325178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708004420133727:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.325338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.325746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708004420133737:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.325801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.606935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.642865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.675557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.709201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.745391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.782624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.823252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.867433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:22.936992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708004420134604:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.937096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.937503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708004420134609:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.937584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708004420134610:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.937630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:22.941946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:22.955373Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708004420134613:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:23.044178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708008715101962:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:23.999851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707987240262698:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:23.999909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:24.633337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... 8Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7593708572566945030:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:34.812173Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:34.844182Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:34.979910Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:34.986655Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593708572566944997:2081] 1768048054811559 != 1768048054811562 2026-01-10T12:27:35.011556Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:35.011703Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:35.054741Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:35.117723Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:35.137043Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:35.137082Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:35.137099Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:35.137225Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:35.820225Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:36.033228Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:36.055816Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.147828Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.422842Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:36.550780Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:39.812460Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708572566945030:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:39.815607Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:41.292196Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708602631717956:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:41.292359Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:41.292903Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708602631717966:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:41.293018Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:41.480964Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.530615Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.588354Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.656260Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.712073Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.766301Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.832209Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:41.909979Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:42.126567Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708606926686160:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:42.126675Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:42.127053Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708606926686166:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:42.127117Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708606926686165:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:42.127159Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:42.133369Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:42.147966Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708606926686169:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:42.221103Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708606926686220:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:45.089127Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots |95.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.907823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.907940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.907996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.908040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.908103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.908135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.908213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.909108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910295Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.002072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.002127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.016472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.016554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.018594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.029484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.040563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.083070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.091331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.091566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.091633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.092754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.279422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.281929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hard::TEvProposeTransactionResult> complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:50.682938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:50.683009Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:149: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2026-01-10T12:27:50.683104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2026-01-10T12:27:50.683325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000042 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:27:50.685295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2026-01-10T12:27:50.685447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000043 2026-01-10T12:27:50.685876Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:50.686031Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 12884904049 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:27:50.686107Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:110: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000043 2026-01-10T12:27:50.687094Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 128 -> 129 2026-01-10T12:27:50.687367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:27:50.687445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:27:50.688429Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=column_engine_logs.cpp:113;event=double_schema_version;v=1; FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000043 2026-01-10T12:27:50.704164Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:50.704231Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:27:50.704475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:27:50.704713Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:50.704797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-01-10T12:27:50.704874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 106, path id: 39 2026-01-10T12:27:50.704970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:50.705024Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:200: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:27:50.705103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:223: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-10T12:27:50.706764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:50.706892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:50.706934Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-10T12:27:50.706979Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-10T12:27:50.707044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:27:50.708349Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:50.708435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-10T12:27:50.708463Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 
2026-01-10T12:27:50.708492Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 14 2026-01-10T12:27:50.708524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:27:50.708600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-10T12:27:50.712042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-10T12:27:50.713675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:27:50.714727Z node 3 :TX_TIERING ERROR: log.cpp:841: fline=manager.cpp:170;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2026-01-10T12:27:50.714886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:27:50.727348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-01-10T12:27:50.727418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-01-10T12:27:50.727570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2026-01-10T12:27:50.733129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:50.733381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-10T12:27:50.733438Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-10T12:27:50.733573Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:27:50.733616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:27:50.733665Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-10T12:27:50.733729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:27:50.733826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-10T12:27:50.733931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:743:2720] message: TxId: 106 
2026-01-10T12:27:50.734000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-10T12:27:50.734041Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-10T12:27:50.734095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-10T12:27:50.734244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:27:50.736781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:27:50.736863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:953:2921] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed >> KqpDataIntegrityTrails::Ddl |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink >> TOlapNaming::CreateColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnStoreOk [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2026-01-10T12:27:23.260737Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708524545023445:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:23.268438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:23.324171Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007692s 2026-01-10T12:27:23.660361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:27:23.689252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:23.689482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:23.703639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:23.795914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:23.917725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:27:24.113066Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7593708524545023645:2147] Handle TEvNavigate describe path dc-1 2026-01-10T12:27:24.113158Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7593708528839991419:2467] HANDLE EvNavigateScheme dc-1 2026-01-10T12:27:24.113298Z node 1 :TX_PROXY_SCHEME_CACHE 
DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708524545023651:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:24.113427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7593708524545023882:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7593708524545023651:2149], cookie# 1 2026-01-10T12:27:24.115203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708524545023939:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023936:2299], cookie# 1 2026-01-10T12:27:24.115265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708524545023940:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023937:2299], cookie# 1 2026-01-10T12:27:24.115308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7593708524545023941:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023938:2299], cookie# 1 2026-01-10T12:27:24.115369Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708520250055990:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023939:2299], cookie# 1 2026-01-10T12:27:24.115411Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708520250055993:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023940:2299], cookie# 1 2026-01-10T12:27:24.115430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7593708520250055996:2060] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7593708524545023941:2299], cookie# 1 2026-01-10T12:27:24.115480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708524545023939:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708520250055990:2054], cookie# 1 2026-01-10T12:27:24.115500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708524545023940:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708520250055993:2057], cookie# 1 2026-01-10T12:27:24.115516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7593708524545023941:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708520250055996:2060], cookie# 1 2026-01-10T12:27:24.115563Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708524545023882:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708524545023936:2299], cookie# 1 2026-01-10T12:27:24.115590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7593708524545023882:2299][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-10T12:27:24.115623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708524545023882:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708524545023937:2299], cookie# 1 2026-01-10T12:27:24.115673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7593708524545023882:2299][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-10T12:27:24.115719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7593708524545023882:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7593708524545023938:2299], cookie# 1 2026-01-10T12:27:24.115741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7593708524545023882:2299][/dc-1] Sync cookie mismatch: sender# [1:7593708524545023938:2299], cookie# 1, current cookie# 0 2026-01-10T12:27:24.115796Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7593708524545023651:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-10T12:27:24.128787Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7593708524545023651:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7593708524545023882:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-10T12:27:24.128961Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7593708524545023651:2149], cacheItem# { Subscriber: { Subscriber: [1:7593708524545023882:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-10T12:27:24.132178Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7593708528839991421:2469], recipient# [1:7593708528839991419:2467], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:27:24.132261Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7593708528839991419:2467] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:27:24.177092Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7593708528839991419:2467] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath 
Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-10T12:27:24.180720Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7593708528839991419:2467] Handle TEvDescribeSchemeResult Forward to# [1:7593708528839991418:2466] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-10T12:27:24.185404Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7593708524545023645:2147] Handle TEvProposeTransaction 2026-01-10T12:27:24.185429Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7593708524545023645:2147] TxId# 281474976710657 ProcessProposeTransaction 2026-01-10T12:27:24.185492Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7593708524545023645:2147] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7593708528839991427:2473] 2026-01-10T12:27:24.271561Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7593708524545023651:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ... 
615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:51.245534Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7593708646478428198:2251], recipient# [14:7593708646478428197:2320], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:51.245720Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:27:51.452317Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7593708620708624077:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:51.452508Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7593708620708624077:2107], cacheItem# { Subscriber: { Subscriber: [14:7593708642183460824:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:51.452577Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7593708620708624077:2107], cacheItem# { Subscriber: { Subscriber: [14:7593708642183460825:2243] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-10T12:27:51.452737Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7593708646478428199:2252], recipient# [14:7593708642183460821:2315], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-10T12:27:51.453049Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7593708642183460821:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:27:51.526153Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7593708642183460806:2241] 2026-01-10T12:27:51.526238Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526264Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7593708642183460807:2241] 2026-01-10T12:27:51.526288Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526500Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7593708642183460808:2241] 2026-01-10T12:27:51.526528Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460805:2241][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526632Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7593708642183460827:2242] 2026-01-10T12:27:51.526665Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526688Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7593708642183460828:2242] 2026-01-10T12:27:51.526714Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526732Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7593708642183460829:2242] 2026-01-10T12:27:51.526756Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460824:2242][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526839Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7593708642183460833:2243] 2026-01-10T12:27:51.526865Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526884Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7593708642183460834:2243] 2026-01-10T12:27:51.526905Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-10T12:27:51.526922Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7593708642183460835:2243] 2026-01-10T12:27:51.526944Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7593708642183460825:2243][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7593708620708624077:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 
elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.908020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.908166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.908277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.908329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.908363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.908426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.909533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.011367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console 
configs 2026-01-10T12:27:45.011427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.026042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.026156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.026369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.035866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.040494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.082756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.082982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.091405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.091608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.091655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.092757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.316876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.317958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.318954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.319019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 129 2026-01-10T12:27:53.731234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:27:53.731316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:27:53.732651Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 3 Name: "comment" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-10T12:27:53.732780Z node 2 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=5000040;tx_id=103;;switch_optimizer=0;switch_accessors=1; FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-01-10T12:27:53.734266Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:53.734300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:53.734431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:27:53.734574Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:53.734605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-10T12:27:53.734639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-10T12:27:53.734970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.735014Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:27:53.735067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-10T12:27:53.735133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:27:53.735811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, 
cookie: 103 2026-01-10T12:27:53.735896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:27:53.735926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:27:53.735957Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-10T12:27:53.735988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:27:53.736845Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:27:53.736901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:27:53.736921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:27:53.736944Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-10T12:27:53.736965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-10T12:27:53.738312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-10T12:27:53.739538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:53.739580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:27:53.741063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:27:53.741526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:27:53.741576Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:53.741619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 
2026-01-10T12:27:53.742067Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:27:53.742142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-10T12:27:53.742174Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-10T12:27:53.742204Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-10T12:27:53.742235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:27:53.742316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-10T12:27:53.743552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-10T12:27:53.756199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-10T12:27:53.756260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2026-01-10T12:27:53.756371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-10T12:27:53.756420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 103 2026-01-10T12:27:53.759427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.759576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.759621Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-10T12:27:53.759726Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:27:53.759761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:27:53.759799Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-10T12:27:53.759835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:27:53.759871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready 
parts: 1/1, is published: true 2026-01-10T12:27:53.759941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:747:2723] message: TxId: 103 2026-01-10T12:27:53.759986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-10T12:27:53.760023Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-10T12:27:53.760055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-10T12:27:53.760197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:27:53.761816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:27:53.761860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:848:2816] TestWaitNotification: OK eventTxId 103 >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.908432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.908553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.908661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.908708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.908741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.908800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.909850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.018472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.018540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.035888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.036007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.036245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.045771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.046088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.083003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.092151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.092228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.092311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.092360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.092428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.092729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.279427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.282958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.283375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-10T12:27:53.668578Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:53.668691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:27:53.668743Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:27:53.669015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:27:53.669072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:27:53.669259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:27:53.669329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:27:53.670898Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:53.670944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:53.671141Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:53.671188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-10T12:27:53.671502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.671551Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-10T12:27:53.671741Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:27:53.671782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:27:53.671823Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:27:53.671858Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:27:53.671897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:27:53.671941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:27:53.671980Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:27:53.672009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:27:53.672091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:27:53.672135Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-10T12:27:53.672174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-10T12:27:53.672683Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:27:53.672783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-10T12:27:53.672827Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-10T12:27:53.672869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-10T12:27:53.672915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:27:53.673005Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-10T12:27:53.677832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-10T12:27:53.678308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-10T12:27:53.679204Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-01-10T12:27:53.680321Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-01-10T12:27:53.683209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { 
Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:27:53.683554Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.683802Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-10T12:27:53.684887Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-10T12:27:53.687185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:27:53.687415Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-10T12:27:53.688047Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-10T12:27:53.688274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-10T12:27:53.688320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-10T12:27:53.688684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-10T12:27:53.688769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:27:53.688881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2684] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-10T12:27:53.692022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:27:53.692277Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.692467Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2026-01-10T12:27:53.694228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: 
StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:27:53.694399Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:27:53.694636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:27:53.694668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:27:53.694957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:27:53.695024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:27:53.695054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:702:2691] TestWaitNotification: OK eventTxId 102 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |95.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.907839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.907943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.908051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.908110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.908146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.908215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.909226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.015136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.015213Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.033036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.033156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.033376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.043202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.043546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.083038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.093183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.093303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.093409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.093479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.093562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.093881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.295194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:27:45.296232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.296952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.297017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.297091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.297186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.297281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.297353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Count reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:27:53.955702Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 2026-01-10T12:27:53.961778Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:53.961851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:53.962067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:27:53.962237Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:53.962289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-10T12:27:53.962355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-10T12:27:53.962432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.962489Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:27:53.962561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-10T12:27:53.963874Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:27:53.963999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:27:53.964043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:27:53.964503Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-10T12:27:53.964571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:27:53.965980Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:27:53.966068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-10T12:27:53.966103Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-10T12:27:53.966138Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-10T12:27:53.966179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:27:53.966269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-10T12:27:53.973233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-10T12:27:53.974737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:27:53.975028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-10T12:27:53.987453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-10T12:27:53.987531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-10T12:27:53.987660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:27:53.990468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.990640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-10T12:27:53.990691Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-10T12:27:53.990828Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:27:53.990890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:27:53.990940Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-10T12:27:53.990982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:27:53.991027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-10T12:27:53.991111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:749:2725] message: TxId: 101 2026-01-10T12:27:53.991167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-10T12:27:53.991214Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-10T12:27:53.991252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-10T12:27:53.991417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:27:53.993291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-10T12:27:53.993356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:750:2726] TestWaitNotification: OK eventTxId 101 2026-01-10T12:27:53.993905Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:27:53.994167Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 296us result status StatusSuccess 2026-01-10T12:27:53.994966Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26728, MsgBus: 32405 |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61423, MsgBus: 12720 |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6460, MsgBus: 27691 |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 21814, MsgBus: 6318 |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.913986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.914077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.914139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.914187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.914233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.914263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.914321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.914421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.915367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.915692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.008357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.008430Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.024151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.024262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.024463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.038878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.040538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.073919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.083061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.091305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.091583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.091679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:27:45.092973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.301789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.305979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.306981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.307057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.307145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2026-01-10T12:27:57.094352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:27:57.094376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-10T12:27:57.094461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-10T12:27:57.094485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-10T12:27:57.094582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-10T12:27:57.094605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-10T12:27:57.094670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-10T12:27:57.094695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-10T12:27:57.094740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:21 2026-01-10T12:27:57.094762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2026-01-10T12:27:57.099986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2026-01-10T12:27:57.100041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2026-01-10T12:27:57.101187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:17 2026-01-10T12:27:57.101237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2026-01-10T12:27:57.101343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-10T12:27:57.101368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2026-01-10T12:27:57.101477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:13 2026-01-10T12:27:57.101503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2026-01-10T12:27:57.101578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2026-01-10T12:27:57.101602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2026-01-10T12:27:57.101683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:9 2026-01-10T12:27:57.101708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2026-01-10T12:27:57.101813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2026-01-10T12:27:57.101840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2026-01-10T12:27:57.101913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:38 2026-01-10T12:27:57.101936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2026-01-10T12:27:57.103138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:40 2026-01-10T12:27:57.103182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2026-01-10T12:27:57.103255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2026-01-10T12:27:57.103281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2026-01-10T12:27:57.103334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:34 2026-01-10T12:27:57.103359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2026-01-10T12:27:57.103415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2026-01-10T12:27:57.103440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2026-01-10T12:27:57.103482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:30 2026-01-10T12:27:57.103503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2026-01-10T12:27:57.103547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:26 2026-01-10T12:27:57.103571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2026-01-10T12:27:57.103620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-10T12:27:57.103644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-01-10T12:27:57.103692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:57 2026-01-10T12:27:57.103715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2026-01-10T12:27:57.118335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:53 
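
The drop path in this test emits paired lines per shard: "Deleted shardIdx ..." followed by "Close pipe to deleted shardIdx ... tabletId ...". A quick consistency check one might run over such output is to confirm every deletion has a matching pipe close. The check itself is my addition; the line patterns are copied from the schemeshard__delete_tablet_reply.cpp entries above.

```python
import re

DELETED = re.compile(r"Deleted shardIdx (\d+:\d+)")
CLOSED = re.compile(r"Close pipe to deleted shardIdx (\d+:\d+) tabletId (\d+)")

def unmatched_shard_deletions(log_text: str) -> set[str]:
    """Return shardIdx values that were deleted but never had their pipe closed."""
    deleted = set(DELETED.findall(log_text))
    closed = {idx for idx, _tablet in CLOSED.findall(log_text)}
    return deleted - closed

# Example against one deletion/close pair copied from the log above:
sample = (
    "Deleted shardIdx 72057594046678944:53 "
    "Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598"
)
assert unmatched_shard_deletions(sample) == set()
```
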
2026-01-10T12:27:57.118408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2026-01-10T12:27:57.118505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:55 2026-01-10T12:27:57.118530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2026-01-10T12:27:57.118599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:49 2026-01-10T12:27:57.118623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2026-01-10T12:27:57.118680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:47 2026-01-10T12:27:57.118704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2026-01-10T12:27:57.118761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:51 2026-01-10T12:27:57.118784Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2026-01-10T12:27:57.118841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:45 2026-01-10T12:27:57.118865Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2026-01-10T12:27:57.118915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:43 2026-01-10T12:27:57.118936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2026-01-10T12:27:57.119011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:41 2026-01-10T12:27:57.119063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2026-01-10T12:27:57.119203Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2026-01-10T12:27:57.121043Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:27:57.121317Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 328us result status StatusPathDoesNotExist 2026-01-10T12:27:57.121490Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t 
been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:27:57.122336Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 42 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-10T12:27:57.122442Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 42 took 124us result status StatusPathDoesNotExist 2026-01-10T12:27:57.122526Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] |95.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 20346, MsgBus: 27357 2026-01-10T12:27:05.501170Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708447282636530:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:05.501372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:05.762415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:05.825860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:05.825986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:05.829750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:05.899626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:05.912205Z node 1 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708447282636404:2081] 1768048025492707 != 1768048025492710 2026-01-10T12:27:06.000532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:06.000567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:06.000586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:06.000676Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:06.042457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:06.489727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:06.508450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:06.541404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:06.699760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:06.887130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:06.966283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.055299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708464462507476:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.055440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.055752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708464462507486:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.055809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.399070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.435199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.468356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.516144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.549154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.586994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.630101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.684934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:09.771912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708464462508354:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.772021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.772426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708464462508359:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.772484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708464462508361:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.772593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:09.776061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:09.788523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708464462508363:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:09.845704Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708464462508414:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:10.499360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708447282636530:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:10.499433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:11.584490Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MTAxNGRmZTYtZGY2ODQ5NjctMjNhYzgxYWEtZWVkMjY3ZmQ=, ActorId: [1:7593708473052443304:2533], ActorState: ExecuteState, LegacyTraceId: 01kekxzxsa0adf8hrx3j04jrw8, Create QueryResponse for error on request, msg: status# TIMEOUT issue ... ion 2026-01-10T12:27:48.357872Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:48.924628Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:48.949374Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.110320Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.524232Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.850295Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:50.547359Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1908:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.547529Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.548782Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1981:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.548948Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.580778Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:50.720536Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:51.025270Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:51.301757Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:51.572883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:51.848798Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:52.178930Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:52.481230Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:52.869244Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2793:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:52.869379Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:52.869948Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2797:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:52.870039Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:52.870123Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2800:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:52.877664Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:53.044850Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:2802:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:53.106218Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:2862:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:55.500985Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:55.793186Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:56.149326Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.318577Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [5:3565:4765], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01keky19qmans8zdx9scts59cp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MThmMTcxMDMtMjk5MjQzNzgtN2MxY2JhOWQtMzNmNGQ2NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2026-01-10T12:27:58.318781Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [5:3565:4765], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01keky19qmans8zdx9scts59cp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=MThmMTcxMDMtMjk5MjQzNzgtN2MxY2JhOWQtMzNmNGQ2NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2026-01-10T12:27:58.319991Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:3566:4766], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01keky19qmans8zdx9scts59cp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MThmMTcxMDMtMjk5MjQzNzgtN2MxY2JhOWQtMzNmNGQ2NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:3559:4455], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-10T12:27:58.320946Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MThmMTcxMDMtMjk5MjQzNzgtN2MxY2JhOWQtMzNmNGQ2NQ==, ActorId: [5:3146:4455], ActorState: ExecuteState, LegacyTraceId: 01keky19qmans8zdx9scts59cp, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Table \'/Root/SecondaryKeys\' retry limit exceeded." severity: 1 } trace_id# |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 4126, MsgBus: 1703 |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8489, MsgBus: 23808 |95.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService >> Secret::Simple |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 28930, MsgBus: 5747 |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> Secret::Deactivated >> 
KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 30013, MsgBus: 18715 |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::Validation |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6611, MsgBus: 11022 |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17219, MsgBus: 22570 |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 9239, MsgBus: 1026 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 9748, MsgBus: 8933 |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 30041, MsgBus: 6475 |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |95.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Secret::DeactivatedQueryService >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> Secret::ValidationQueryService >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] Test command err: Trying to start YDB, gRPC: 20419, MsgBus: 22167 2026-01-10T12:27:16.959111Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708492465730586:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:16.960387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:16.979593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:17.316331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:17.376037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:17.376135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:17.460417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:17.461819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:17.462095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708492465730547:2081] 1768048036942075 != 1768048036942078 2026-01-10T12:27:17.580173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:17.594906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:17.594930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:17.594943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:17.595257Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:17.963424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:18.060496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:18.069428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:27:18.149629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.290995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.459953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:18.544731Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:20.687120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708509645601611:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.687240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.687562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708509645601620:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:20.687601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.017224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.051458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.091087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.127427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.204463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.243893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.283750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.336846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:21.437307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708513940569793:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.437422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.437733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708513940569798:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.437782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708513940569799:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.438100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:21.441124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:21.458874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708513940569802:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:21.534335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708513940569853:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:21.951418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708492465730586:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:21.951483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... Result TxId: 281474976710669 2026-01-10T12:28:02.788289Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037953 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788336Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788338Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037972 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788388Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788400Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788432Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038002 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788448Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788487Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788494Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788535Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788541Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled 
event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788591Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788601Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788641Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788657Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788714Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788734Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788767Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788783Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788827Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788833Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788873Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788882Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788920Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788936Z node 5 :TX_COLUMNSHARD WARN: 
ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788965Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.788984Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789010Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789032Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789075Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789101Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789136Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789168Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789191Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789217Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789245Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789267Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-10T12:28:02.789299Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 
2026-01-10T12:28:02.791875Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:28:02.799475Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:28:02.879842Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708691595588579:6077] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:02.893695Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:04.176788Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-10T12:28:04.184321Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 18964, MsgBus: 18420 2026-01-10T12:25:09.845133Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707950680993812:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.845805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.119777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.124731Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.124856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.153679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.257296Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.366706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.430099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.430139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.430147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.430234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.855161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.053428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.113547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.297839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.443778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.514993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.132623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967860864831:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool 
info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.132750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.133115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707967860864840:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.133164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.729952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.760437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.791693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.824850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.872844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.921821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.969570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.088927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707972155833016:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.088994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.089369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707972155833019:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.089429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.089612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707972155833023:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.100502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.116675Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707972155833025:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.195891Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707972155833076:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.845632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707950680993812:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.845691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:15.986541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.093273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:25:25.093312Z node 1 :IMPORT WARN: schemeshard_import.c ... de 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:51.311811Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:51.454847Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:51.454995Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:51.459533Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:51.463673Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593708644225537030:2081] 1768048071267835 != 1768048071267838 2026-01-10T12:27:51.486232Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:51.537372Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:51.639764Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:51.639802Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:51.639821Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:51.639952Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:52.281545Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:52.491555Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:52.509507Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:52.601740Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:52.855717Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:53.032936Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:56.271071Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708644225537056:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:56.271185Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:57.684388Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708669995342695:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:57.684505Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:57.684928Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708669995342704:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:57.685020Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:57.814438Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:57.860994Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:57.910527Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:57.957029Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.005319Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.056816Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.109251Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.185995Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:58.314276Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708674290310897:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:58.314441Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:58.314865Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708674290310902:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:58.314951Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708674290310903:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:58.315015Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:58.320806Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:58.335116Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708674290310906:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:58.396868Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708674290310957:3788] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:01.355585Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:06.440904Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:06.440942Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskPileGuardFullPile >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelTests::PDiskFaultyState >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17550, MsgBus: 20621 2026-01-10T12:26:35.467654Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708318961080658:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:35.470420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:35.755298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:35.755492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:35.786847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:35.806807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:35.856351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:35.916677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:35.916701Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:35.916707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:35.916789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:36.034273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:36.350440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:36.402884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.475120Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:36.562770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.703986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:36.770761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:38.750165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708331845984316:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.750277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.750824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708331845984326:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:38.750871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.112493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.154158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.187972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.223185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.262864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.309317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.358984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.405004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:39.493499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336140952504:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.493602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.493905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336140952510:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.493958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.494042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708336140952509:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:39.498431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:39.511305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708336140952513:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:39.601995Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708336140952564:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:40.464961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708318961080658:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:40.465045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12082, MsgBus: 14595 2026-01-10T12:26:42.500190Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708348351920954:2098];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:42.500290Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:42.523251Z node 2 :KQP_PROXY WARN: kqp_fin ... 2057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:45.147204Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:45.147632Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:45.147706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:45.172193Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:45.181479Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-10T12:27:45.181651Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:45.182503Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:45.289317Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:45.316995Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:45.317025Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:45.317043Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:45.317153Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:45.660110Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:45.881974Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:45.923173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:45.923247Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:45.960124Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:46.086037Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:46.262972Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:46.386921Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.312663Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708635643787478:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:49.312769Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:49.313004Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708635643787488:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:49.313036Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:49.413734Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.481883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.588709Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.674988Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.735662Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.814377Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.894793Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:49.916164Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708614168948559:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:49.916244Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:49.984455Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:50.113156Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708639938755974:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.113286Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.113718Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708639938755979:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.113797Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708639938755980:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.113848Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:50.118834Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:50.140481Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708639938755983:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:27:50.239683Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708639938756066:4941] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:00.079501Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:00.079536Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TSentinelTests::Smoke >> TSentinelTests::PDiskPileGuardHalfPile |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskFaultyState [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 21112, MsgBus: 21674 2026-01-10T12:25:14.528945Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707968955431169:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.529517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:14.811446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:14.811612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:14.842376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:14.919803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:14.920136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:14.996571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:14.996595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:14.996607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:14.996676Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:15.217070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:15.499230Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:15.514242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:15.538000Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:15.569685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.723085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.876752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:15.950053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.932833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707981840334887:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.932969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.933846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707981840334897:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.933951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.245277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.277605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.318054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.356391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.389910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.430084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.480357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.557059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:18.641073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707986135303067:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.641153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.641346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707986135303072:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.641382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707986135303073:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.641488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:18.645693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:18.657337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707986135303076:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:18.742156Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707986135303127:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:19.526120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707968955431169:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:19.526199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:20.382239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 604Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:54.530965Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:27:54.698443Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:54.706946Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:54.707109Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:54.708178Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:54.726072Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:54.834537Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:54.834562Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:54.834569Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:54.834663Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:54.924463Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:55.507991Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:55.700521Z node 12 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:55.711492Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:55.724058Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:55.832384Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:56.159341Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:56.280037Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:59.500598Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593708655825614103:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:59.500698Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:00.674887Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708681595419724:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:00.675027Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:00.675415Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708681595419733:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:00.675491Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:00.779081Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:00.828559Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:00.877654Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:00.927054Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.007099Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.054971Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.107646Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.181728Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.298383Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708685890387917:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:01.298497Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:01.298583Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708685890387922:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:01.298785Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593708685890387924:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:01.298881Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:01.303453Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:01.321602Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593708685890387926:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:01.421870Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593708685890387977:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:04.715538Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSentinelTests::PDiskErrorState >> TSentinelBaseTests::GuardianFaultyPDisks [GOOD] >> TSentinelBaseTests::GuardianRackRatio >> TSentinelBaseTests::GuardianRackRatio [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::GuardianRackRatio [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> TSentinelTests::PDiskPileGuardHalfPile [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode >> TSentinelTests::PDiskPileGuardFullPile [GOOD] >> TSentinelTests::PDiskPileGuardConfig |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSentinelTests::PDiskUnknownState [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskUnknownState [GOOD] Test command err: 2026-01-10T12:28:11.913697Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.913770Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.913839Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:11.913867Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:11.913913Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.913976Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.916537Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { 
Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.920258Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { 
VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... sk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.868034Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-10T12:28:13.868133Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.868242Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.868260Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-01-10T12:28:13.868294Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:13.868563Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:56, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-10T12:28:13.868616Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-10T12:28:13.868811Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-10T12:28:13.868847Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:56 2026-01-10T12:28:13.868904Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:13.868925Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:13.869015Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:28:13.869054Z node 9 :CMS 
DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-10T12:28:13.869094Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-10T12:28:13.869118Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-10T12:28:13.869136Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:28:13.869154Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:28:13.869171Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-10T12:28:13.869188Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-10T12:28:13.869360Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.869794Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.869890Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved17 } 
ResponseTime: 120110 2026-01-10T12:28:13.869923Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 17 2026-01-10T12:28:13.870086Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.870175Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved15 } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.870207Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-10T12:28:13.870298Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.870391Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.870417Z node 9 
:CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-01-10T12:28:13.870495Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:13.870528Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:13.870801Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:43, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-10T12:28:13.870851Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-10T12:28:13.871025Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-10T12:28:13.871057Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:43 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskFaultyGuard |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] >> TSentinelTests::PDiskPileGuardConfig [GOOD] >> TProxyActorTest::TestAttachSession [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> TProxyActorTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] Test command err: 2026-01-10T12:28:11.926625Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.926685Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.926739Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:11.926772Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] 
StateUpdater was delayed 2026-01-10T12:28:11.926821Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.926884Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.927628Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.931354Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: 
"/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { 
NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-10T12:28:16.064431Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-10T12:28:16.064612Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-10T12:28:16.064687Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:16.075490Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:16.075575Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:16.075705Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:28:16.075757Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-10T12:28:16.075790Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# 
[11:8388350642965737326:1634689637] 2026-01-10T12:28:16.075853Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-10T12:28:16.075914Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:28:16.075966Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:28:16.075998Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-10T12:28:16.076030Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-10T12:28:16.076497Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.077237Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.077519Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.077778Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" 
Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.077965Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.078152Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.078313Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.078477Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 
0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-10T12:28:16.078569Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:16.079271Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:61, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:16.079464Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:63, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:16.079525Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:60, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:16.079565Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:62, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:16.079626Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-01-10T12:28:16.080402Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-10T12:28:16.080583Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:60 2026-01-10T12:28:16.080673Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:61 2026-01-10T12:28:16.080701Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:62 2026-01-10T12:28:16.080748Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:63 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] Test command err: 2026-01-10T12:28:11.913687Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.913747Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.913804Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:11.913826Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:11.913858Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.913929Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.916285Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" 
State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.919895Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 
Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 
1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 
FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-10T12:28:15.920873Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-10T12:28:15.921015Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-10T12:28:15.921072Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:15.931896Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:15.931952Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:15.932052Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:28:15.932152Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-10T12:28:15.932196Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-10T12:28:15.932223Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-10T12:28:15.932246Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:28:15.932268Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:28:15.932293Z node 9 :CMS DEBUG: 
sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-10T12:28:15.932314Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-10T12:28:15.932914Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.933353Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.933461Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.933638Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 
2026-01-10T12:28:15.933803Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.933989Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.934147Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.934300Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-10T12:28:15.934359Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:15.934767Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:48, status# INACTIVE, required status# ACTIVE, reason# 
PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:15.934821Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:50, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:15.934849Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:49, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:15.934884Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:51, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:15.934925Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-01-10T12:28:15.935262Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-10T12:28:15.935307Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:48 2026-01-10T12:28:15.935354Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:49 2026-01-10T12:28:15.935379Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:50 2026-01-10T12:28:15.935406Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:51 >> TProxyActorTest::TestCreateSemaphoreInterrupted |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TSentinelTests::PDiskFaultyGuard [GOOD] >> TSentinelTests::PDiskFaultyGuardWithForced >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TKesusTest::TestAcquireSemaphore [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TAccessServiceTest::Authenticate |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> Secret::Deactivated [GOOD] >> FolderServiceTest::TFolderServiceAdapter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2026-01-10T12:27:37.783480Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.783608Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.807785Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 
2026-01-10T12:27:37.807972Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.848105Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.848646Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4099683402793181299, session=0, seqNo=0) 2026-01-10T12:27:37.848859Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:37.861129Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4099683402793181299, session=1) 2026-01-10T12:27:37.861485Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=11489395392101466496, session=0, seqNo=0) 2026-01-10T12:27:37.861603Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:37.876610Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=11489395392101466496, session=2) 2026-01-10T12:27:37.877505Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:37.879613Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:37.879796Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:37.880047Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Lock2" count=1) 2026-01-10T12:27:37.880428Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-10T12:27:37.880533Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-10T12:27:37.880653Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=333, session=1, semaphore="Lock2" count=1) 2026-01-10T12:27:37.880720Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-10T12:27:37.894111Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:37.894212Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-10T12:27:37.894263Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=333) 2026-01-10T12:27:37.894807Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=5527466066493649341, name="Lock1") 2026-01-10T12:27:37.899737Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=5527466066493649341) 
2026-01-10T12:27:37.900392Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:156:2178], cookie=4725228861176962334, name="Lock2") 2026-01-10T12:27:37.900482Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:156:2178], cookie=4725228861176962334) 2026-01-10T12:27:37.916519Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:37.916638Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:37.917136Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:37.917717Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:37.936169Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:37.936321Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:37.936390Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-10T12:27:37.936437Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-10T12:27:37.936857Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:195:2208], cookie=12577489923238030763, name="Lock1") 2026-01-10T12:27:37.936948Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:195:2208], cookie=12577489923238030763) 2026-01-10T12:27:37.937473Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:203:2215], cookie=16349714717625112432, name="Lock2") 2026-01-10T12:27:37.937561Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:203:2215], cookie=16349714717625112432) 2026-01-10T12:27:38.422076Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:38.435342Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:38.815321Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:38.828013Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:39.210858Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:39.233195Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:39.591882Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:39.604927Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:39.987797Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:40.020809Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:40.379226Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-01-10T12:27:40.393735Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:40.752613Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:40.764945Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.136546Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.160811Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.528268Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.541016Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.936501Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.948700Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.312118Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.326814Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.700289Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.712790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.085223Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.097790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.452669Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.464760Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.872914Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.885576Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.251826Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.264488Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.631768Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.644844Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.015115Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.027698Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.433861Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.446824Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.866002Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.878838Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.258117Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.274922Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.663933Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.676387Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.041692Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.054386Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.432785Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.445114Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.822716Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... ck.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:14.722597Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:15.072690Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:15.084320Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:15.423164Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:15.434839Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:15.790885Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:15.804323Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:16.160018Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:16.172523Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:16.545927Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:16.558480Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:16.911321Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:16.923425Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:17.275569Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.287824Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:17.640887Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.653054Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.004821Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.016798Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.401497Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-10T12:28:18.401609Z node 4 :KESUS_TABLET DEBUG: 
tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:28:18.401666Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-10T12:28:18.401781Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-10T12:28:18.401846Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2026-01-10T12:28:18.401884Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:28:18.413835Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-10T12:28:18.414420Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:367:2347], cookie=10755560561911414271, name="Lock1") 2026-01-10T12:28:18.414502Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:367:2347], cookie=10755560561911414271) 2026-01-10T12:28:18.414903Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:370:2350], cookie=3140328731302246096, name="Lock2") 2026-01-10T12:28:18.414956Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:370:2350], cookie=3140328731302246096) 2026-01-10T12:28:18.415331Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:373:2353], cookie=6892368793333518038) 2026-01-10T12:28:18.415384Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:373:2353], cookie=6892368793333518038) 2026-01-10T12:28:18.428911Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:18.429009Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:18.429395Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:18.429912Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:18.477823Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:18.477983Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-10T12:28:18.478038Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-10T12:28:18.478424Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:412:2383], cookie=5026767698013009453) 2026-01-10T12:28:18.478520Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:412:2383], cookie=5026767698013009453) 2026-01-10T12:28:18.479157Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2389], cookie=8875037524727077173, name="Lock1") 2026-01-10T12:28:18.479246Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[4:419:2389], cookie=8875037524727077173) 2026-01-10T12:28:18.479798Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:422:2392], cookie=15028461956850368563, name="Lock2") 2026-01-10T12:28:18.479869Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:422:2392], cookie=15028461956850368563) 2026-01-10T12:28:18.832058Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:18.832195Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:18.852233Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:18.852573Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:18.876817Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:18.877357Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=13761490041803508042, session=0, seqNo=0) 2026-01-10T12:28:18.877536Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:28:18.889678Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=13761490041803508042, session=1) 2026-01-10T12:28:18.890038Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=1109429910803981437, session=0, seqNo=0) 2026-01-10T12:28:18.890173Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:28:18.901823Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=1109429910803981437, session=2) 2026-01-10T12:28:18.902081Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-10T12:28:18.914414Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=111) 2026-01-10T12:28:18.915118Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=6791788970361101607, name="Sem1", limit=1) 2026-01-10T12:28:18.915282Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:28:18.927473Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=6791788970361101607) 2026-01-10T12:28:18.927983Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=333, session=1, semaphore="Sem1" count=100500) 2026-01-10T12:28:18.940191Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=333) 2026-01-10T12:28:18.940548Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=222, session=1, semaphore="Sem1" count=1) 
2026-01-10T12:28:18.940718Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-10T12:28:18.940927Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=333, session=2, semaphore="Sem1" count=1) 2026-01-10T12:28:18.953177Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=222) 2026-01-10T12:28:18.953284Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=333) 2026-01-10T12:28:18.953912Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2182], cookie=2796492681442372566, name="Sem1") 2026-01-10T12:28:18.954005Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2182], cookie=2796492681442372566) 2026-01-10T12:28:18.954560Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2185], cookie=6357249886059721798, name="Sem1") 2026-01-10T12:28:18.954642Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2185], cookie=6357249886059721798) 2026-01-10T12:28:18.955201Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2188], cookie=12153365715860941096, name="Sem1", force=0) 2026-01-10T12:28:18.967495Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2188], cookie=12153365715860941096) 2026-01-10T12:28:18.968146Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:171:2193], cookie=17857550105252989262, name="Sem1", force=1) 2026-01-10T12:28:18.968270Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-10T12:28:18.980538Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:171:2193], cookie=17857550105252989262) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> FolderServiceTest::TFolderServiceTransitional >> TUserAccountServiceTest::Get >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] >> TAccessServiceTest::PassRequestId >> FolderServiceTest::TFolderService |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] Test command err: 2026-01-10T12:28:11.916573Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.916663Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.916735Z node 1 :CMS DEBUG: sentinel.cpp:1063: 
[Sentinel] [Main] UpdateState 2026-01-10T12:28:11.916765Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:11.916824Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.916914Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.917854Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.923865Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle 
TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 
GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } 
VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... oupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1000 } } Group { GroupId: 13 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1001 } } Group { GroupId: 14 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1002 } } Group { GroupId: 15 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1003 } } Group { GroupId: 16 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1000 } } Group { GroupId: 17 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1001 } } Group { GroupId: 18 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1002 } } Group { GroupId: 19 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1003 } } Group { GroupId: 20 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1000 } } Group { GroupId: 21 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1001 } } Group { GroupId: 22 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1002 } } Group { GroupId: 23 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1003 } } Group { GroupId: 24 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1000 } } Group { GroupId: 25 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1001 } } Group { GroupId: 26 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1002 } } Group { GroupId: 27 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1003 } } Group { GroupId: 28 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1000 } } Group { GroupId: 29 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1001 } } Group { GroupId: 30 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1002 } } Group { GroupId: 31 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1003 } } Group { GroupId: 32 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1000 } } Group { GroupId: 33 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 
PDiskId: 138 VSlotId: 1001 } } Group { GroupId: 34 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1002 } } Group { GroupId: 35 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1003 } } Group { GroupId: 36 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1000 } } Group { GroupId: 37 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1001 } } Group { GroupId: 38 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1002 } } Group { GroupId: 39 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1003 } } Group { GroupId: 40 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1000 } } Group { GroupId: 41 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1001 } } Group { GroupId: 42 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1002 } } Group { GroupId: 43 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1003 } } Group { GroupId: 44 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1000 } } Group { GroupId: 45 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1001 } } Group { GroupId: 46 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1002 } } Group { GroupId: 47 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1003 } } Group { GroupId: 48 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1000 } } Group { GroupId: 49 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1001 } } Group { GroupId: 50 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1002 } } Group { GroupId: 51 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1003 } } Group { GroupId: 52 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1000 } } Group { GroupId: 53 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1001 } } Group { GroupId: 54 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1002 } } Group { GroupId: 55 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1003 } } Group { GroupId: 56 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1000 } } Group { GroupId: 57 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1001 } } Group { GroupId: 58 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1002 } } Group { GroupId: 59 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1003 } } Group { GroupId: 60 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1000 } } Group { GroupId: 61 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1001 } } Group { GroupId: 62 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1002 } } Group { GroupId: 63 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1003 } } Group { GroupId: 64 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 
VSlotId: 1000 } } Group { GroupId: 65 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1001 } } Group { GroupId: 66 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1002 } } Group { GroupId: 67 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1003 } } Group { GroupId: 68 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1000 } } Group { GroupId: 69 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1001 } } Group { GroupId: 70 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1002 } } Group { GroupId: 71 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1003 } } Group { GroupId: 72 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1000 } } Group { GroupId: 73 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1001 } } Group { GroupId: 74 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1002 } } Group { GroupId: 75 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1003 } } Group { GroupId: 76 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1000 } } Group { GroupId: 77 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1001 } } Group { GroupId: 78 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1002 } } Group { GroupId: 79 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1003 } } } } Success: true 2026-01-10T12:28:19.457498Z node 13 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.000000s 2026-01-10T12:28:19.457552Z node 13 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:19.457775Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:28:19.457829Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:28:19.458251Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 132 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-132.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 133 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-133.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 134 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-134.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 135 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-135.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 136 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-136.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 137 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-137.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 138 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-138.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 139 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-139.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:19.458802Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 140 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-140.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 141 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-141.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 142 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-142.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 143 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-143.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 144 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-144.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 145 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-145.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 146 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-146.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 147 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-147.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 148 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-148.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 149 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-149.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-10T12:28:19.458897Z node 13 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:06.279277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:06.367627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:06.377505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:292:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:06.681897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:06.682035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:06.687884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048083502116 != 1768048083502120 2026-01-10T12:28:06.696326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:06.762321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:06.844897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:07.331068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:07.331995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:07.332061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:07.332140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:07.332606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:07.346318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:19.375408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2762], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.375596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.376098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.376204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |95.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease |95.6%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TAccessServiceTest::Authenticate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2026-01-10T12:27:40.846652Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.846789Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.868401Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.868586Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.903900Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.904623Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=15058137684892287466, session=0, seqNo=0) 2026-01-10T12:27:40.904808Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.920926Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=15058137684892287466, session=1) 2026-01-10T12:27:40.921296Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=5758619838489072994, session=0, seqNo=0) 2026-01-10T12:27:40.921435Z 
node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:40.934557Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=5758619838489072994, session=2) 2026-01-10T12:27:40.935221Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:148:2170], cookie=14001848144995502253, name="Sem1", limit=1) 2026-01-10T12:27:40.935415Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:27:40.950518Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:148:2170], cookie=14001848144995502253) 2026-01-10T12:27:40.950989Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-10T12:27:40.951207Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-10T12:27:40.951435Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-10T12:27:40.968570Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:40.968663Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-10T12:27:40.969354Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:156:2178], cookie=14947674070732456789, name="Sem1") 2026-01-10T12:27:40.969475Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:156:2178], cookie=14947674070732456789) 2026-01-10T12:27:40.970037Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:159:2181], cookie=9814248738937751211, name="Sem1") 2026-01-10T12:27:40.970131Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2181], cookie=9814248738937751211) 2026-01-10T12:27:41.404174Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.421002Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.787483Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.804113Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.188360Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.200834Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.542357Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.554877Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.927945Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.940266Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.296025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.308162Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.639755Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.652611Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.005869Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.023388Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.393663Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.408925Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.807746Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.820853Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.204956Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.217203Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.634488Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.646866Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.033207Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.047861Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.443300Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.457129Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.887857Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.903667Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.266615Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.279025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.657369Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.677053Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.048003Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.060142Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.424462Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.441651Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-10T12:27:48.858709Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.873227Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:49.238372Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:49.252580Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:49.627452Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:49.643415Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.007814Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.021197Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.392411Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.405350Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.795358Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.813936Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:51.203344Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:51.216342Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:51.594741Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:51.607543Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:51.984304Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:51.996669Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:52.374851Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:52.389153Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:52.814634Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:52.826825Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:53.211640Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:53.231942Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:53.601741Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:53.614091Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:53.977287Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:53.990950Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:54.352427Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:54.366511Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... x_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:16.434798Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:16.786431Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:16.798592Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:17.160165Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.172399Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:17.597945Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.610158Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:17.972120Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.984400Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.345813Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.357973Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.719648Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.731912Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.093783Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.106317Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.488647Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.500500Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.863123Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.875726Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:20.246033Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:20.268922Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:20.651306Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:20.663705Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:21.027575Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:21.039493Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:21.402729Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-10T12:28:21.402801Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:28:21.402874Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 
2026-01-10T12:28:21.415256Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-10T12:28:21.426324Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:456:2415], cookie=17249337326041714456, name="Sem1") 2026-01-10T12:28:21.426452Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:456:2415], cookie=17249337326041714456) 2026-01-10T12:28:21.868099Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:21.868229Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:21.882899Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:21.883170Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:21.907443Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:21.908037Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=6150352959689825216, session=0, seqNo=0) 2026-01-10T12:28:21.908231Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:28:21.920405Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=6150352959689825216, session=1) 2026-01-10T12:28:21.920741Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=14949088911440344109, session=0, seqNo=0) 2026-01-10T12:28:21.920876Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:28:21.932725Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=14949088911440344109, session=2) 2026-01-10T12:28:21.932985Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=14936270658728411018, session=0, seqNo=0) 2026-01-10T12:28:21.933102Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-10T12:28:21.944910Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=14936270658728411018, session=3) 2026-01-10T12:28:21.945395Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=2091193135408337448, name="Sem1", limit=3) 2026-01-10T12:28:21.945519Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:28:21.957468Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=2091193135408337448) 2026-01-10T12:28:21.957726Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-10T12:28:21.957883Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 
2026-01-10T12:28:21.958103Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-10T12:28:21.958204Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-10T12:28:21.958321Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-10T12:28:21.970141Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=111) 2026-01-10T12:28:21.970208Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=222) 2026-01-10T12:28:21.970235Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=333) 2026-01-10T12:28:21.970782Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2180], cookie=14911098443573153504, name="Sem1") 2026-01-10T12:28:21.970870Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2180], cookie=14911098443573153504) 2026-01-10T12:28:21.971230Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2183], cookie=9972506027606451189, name="Sem1") 2026-01-10T12:28:21.971289Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2183], cookie=9972506027606451189) 2026-01-10T12:28:21.971537Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=444, session=1, semaphore="Sem1" count=1) 2026-01-10T12:28:21.971644Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-10T12:28:21.983643Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=444) 2026-01-10T12:28:21.984164Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:166:2188], cookie=11586950777551506151, name="Sem1") 2026-01-10T12:28:21.984247Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:166:2188], cookie=11586950777551506151) 2026-01-10T12:28:21.984570Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2191], cookie=10003730887842890955, name="Sem1") 2026-01-10T12:28:21.984624Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2191], cookie=10003730887842890955) 2026-01-10T12:28:21.994891Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:21.995005Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:21.995519Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:21.996192Z node 5 
:KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:22.042538Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:22.042731Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-10T12:28:22.042806Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-10T12:28:22.042839Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-10T12:28:22.043245Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:208:2221], cookie=5117921504268474105, name="Sem1") 2026-01-10T12:28:22.043347Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:208:2221], cookie=5117921504268474105) 2026-01-10T12:28:22.043953Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:217:2229], cookie=12281483129010497495, name="Sem1") 2026-01-10T12:28:22.044041Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:217:2229], cookie=12281483129010497495) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> Secret::DeactivatedQueryService [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-10T12:28:19.791173Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708767209576947:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:19.792588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.004177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.080833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.080952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.103107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.103762Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708767209576916:2081] 1768048099788551 != 1768048099788554 2026-01-10T12:28:20.109273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.266884Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.342921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.384751Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d1dc849ed50] Connect to grpc://localhost:12574 2026-01-10T12:28:20.386901Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1dc849ed50] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2026-01-10T12:28:20.405606Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1dc849ed50] Status 7 Permission Denied 2026-01-10T12:28:20.406262Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1dc849ed50] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2026-01-10T12:28:20.409022Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d1dc849ed50] Response AuthenticateResponse { subject { user_account { id: "1234" } } } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2026-01-10T12:28:20.018440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708768477710226:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.018556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.271025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.271155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.315850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.321623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.363757Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708768477710123:2081] 1768048100012703 != 1768048100012706 2026-01-10T12:28:20.364299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.552440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.596922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.620476Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca214f76250] Connect to grpc://localhost:19369 2026-01-10T12:28:20.621604Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca214f76250] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-10T12:28:20.633310Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca214f76250] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2026-01-10T12:28:20.634515Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca214f32550] Connect to grpc://localhost:10037 2026-01-10T12:28:20.635439Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca214f32550] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-10T12:28:20.646512Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca214f32550] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2026-01-10T12:28:20.647249Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca214f32550] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-10T12:28:20.650083Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca214f32550] Status 5 Not Found 2026-01-10T12:28:20.650712Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca214f76250] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-10T12:28:20.652396Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca214f76250] Status 5 Not Found ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2026-01-10T12:28:20.112363Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708771303802525:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.112573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.397818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.397969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.430500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.443822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.457199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.458668Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708771303802422:2081] 1768048100108322 != 1768048100108325 2026-01-10T12:28:20.630578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.695671Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-10T12:28:20.196713Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708768706086624:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.197021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.433847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.433952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.477169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.492414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.507705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.537894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708768706086512:2081] 1768048100184740 != 1768048100184743 2026-01-10T12:28:20.662001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.728957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.747611Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d37d758edd0]{trololo} Connect to grpc://localhost:7298 2026-01-10T12:28:20.749066Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d37d758edd0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-10T12:28:20.757921Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d37d758edd0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2026-01-10T12:28:20.182114Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708768007775059:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.182203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.426913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.471759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.471934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.501046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.584816Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708768007774845:2081] 1768048100172202 != 1768048100172205 2026-01-10T12:28:20.597224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.693214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.772348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.778831Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca4aeb9ed50] Connect to grpc://localhost:8845 2026-01-10T12:28:20.791959Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4aeb9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-10T12:28:20.799650Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca4aeb9ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8845: Failed to connect to remote host: Connection refused 2026-01-10T12:28:20.801012Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4aeb9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-10T12:28:20.801639Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca4aeb9ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8845: Failed to connect to remote host: Connection refused 2026-01-10T12:28:21.180142Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:21.802106Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4aeb9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-10T12:28:21.805141Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca4aeb9ed50] Status 5 Not Found 2026-01-10T12:28:21.805600Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4aeb9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-10T12:28:21.809191Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca4aeb9ed50] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } |95.6%| [TM] {BAZEL_UPLOAD} 
ydb/library/ycloud/impl/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TInterconnectTest::TestManyEvents >> TestProtocols::TestConnectProtocol |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::OldFormat >> TInterconnectTest::TestBlobEvent ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2026-01-10T12:28:20.080401Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708769362278918:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.081023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.342485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.342592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.380136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.391955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.450842Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.458171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708769362278854:2081] 1768048100068454 != 1768048100068457 2026-01-10T12:28:20.647458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.653943Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d677f29ed50] Connect to grpc://localhost:13794 2026-01-10T12:28:20.667496Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d677f29ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-10T12:28:20.674040Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d677f29ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13794: Failed to connect to remote host: Connection refused 2026-01-10T12:28:20.675402Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d677f29ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-10T12:28:20.675977Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d677f29ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13794: Failed to connect to remote host: Connection refused 2026-01-10T12:28:20.679321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:21.084359Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:21.676668Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d677f29ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-10T12:28:21.679874Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d677f29ed50] Status 5 Not Found 2026-01-10T12:28:21.680335Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d677f29ed50] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-10T12:28:21.683216Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d677f29ed50] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:09.638984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:09.719304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:09.727660Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:292:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:09.990627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:09.990730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:09.994885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048087062796 != 1768048087062800 2026-01-10T12:28:10.000981Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:10.047278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:10.124804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:10.610862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:10.611985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:10.612052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:10.612114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:10.612624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:10.626832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:22.336513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2761], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.336692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:893:2766], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.336822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.338191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.338433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.343924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:22.369709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:896:2769], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:28:22.431385Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:949:2803] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:22.694750Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:959:2812], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2026-01-10T12:28:22.697835Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWJmNzAyMmQtZmM0MGUxYjYtMTRhNWQwOGMtN2VmYTFiZjA=, ActorId: [1:880:2759], ActorState: ExecuteState, LegacyTraceId: 01keky22xq6834z5ca0ek822qn, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 50 } message: "Executing CREATE OBJECT SECRET" end_position { row: 1 column: 50 } severity: 1 issues { message: "metadata provider service is disabled" severity: 1 } } } tx_id# trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |95.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> TInterconnectTest::TestNotifyUndelivered >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2026-01-10T12:28:23.009881Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.013261Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.021782Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.022012Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.026807Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.028683Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2026-01-10T12:28:23.011736Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] 
DbBirthLsn# 0 2026-01-10T12:28:23.015147Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.024526Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.024744Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.029716Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-10T12:28:23.031452Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect |95.6%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2026-01-10T12:28:24.858332Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [4:22:2057] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-10T12:28:25.334185Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:20:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-10T12:28:25.823665Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [8:22:2057] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-10T12:28:25.826504Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [7:20:2058] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2026-01-10T12:28:25.329529Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @207 (null) -> PendingActivation 2026-01-10T12:28:25.329597Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:10:2048] [node 5] ICP01 ready to work 2026-01-10T12:28:25.329801Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 (null) -> PendingActivation 2026-01-10T12:28:25.329823Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2026-01-10T12:28:25.330477Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-10T12:28:25.331591Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:12689 2026-01-10T12:28:25.331703Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-10T12:28:25.332048Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:747: Handshake [5:21:2058] [node 6] ICH01 starting outgoing handshake 2026-01-10T12:28:25.332197Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-10T12:28:25.332801Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:751: Handshake [5:21:2058] [node 6] ICH05 connected to peer 2026-01-10T12:28:25.333217Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 
Accepted from: ::1:48676 2026-01-10T12:28:25.333549Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2026-01-10T12:28:25.337820Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 508272 ProgramStartTime: 5171098341916 Serial: 564823713 ReceiverNodeId: 6 SenderActorId: "[5:564823713:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 508272" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 508272" AcceptUUID: "Cluster for process with id: 508272" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "&\2128\250\364G\010\323\006\333\272\025\345W\260W\265\223\274\3511H\'\341\3203\320\353\035\300\253\036" RequestXxhash: true RequestXdcShuffle: true 2026-01-10T12:28:25.338435Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 508272 ProgramStartTime: 5171098341916 Serial: 564823713 ReceiverNodeId: 6 SenderActorId: "[5:564823713:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 508272" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 508272" AcceptUUID: "Cluster for process with id: 508272" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "&\2128\250\364G\010\323\006\333\272\025\345W\260W\265\223\274\3511H\'\341\3203\320\353\035\300\253\036" RequestXxhash: true RequestXdcShuffle: true 2026-01-10T12:28:25.338498Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [6:23:2058] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-10T12:28:25.338828Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-10T12:28:25.339753Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [6:10:2048] [node 5] ICP02 configured for host ::1:8985 2026-01-10T12:28:25.339814Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:263: Proxy [6:10:2048] [node 5] ICP17 incoming handshake (actor [6:23:2058]) 2026-01-10T12:28:25.339862Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-10T12:28:25.339912Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2026-01-10T12:28:25.339970Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:220: Proxy [6:10:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2026-01-10T12:28:25.340007Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @221 PendingConnection -> PendingConnection 2026-01-10T12:28:25.340421Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 508272 ProgramStartTime: 5171108456730 Serial: 3314220055 SenderActorId: "[6:3314220055:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 508272" AcceptUUID: "Cluster for process with id: 508272" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true 
UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-10T12:28:25.340859Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 508272 ProgramStartTime: 5171108456730 Serial: 3314220055 SenderActorId: "[6:3314220055:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 508272" AcceptUUID: "Cluster for process with id: 508272" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-10T12:28:25.340917Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:21:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-10T12:28:25.341034Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-10T12:28:25.341627Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "&\2128\250\364G\010\323\006\333\272\025\345W\260W\265\223\274\3511H\'\341\3203\320\353\035\300\253\036" 2026-01-10T12:28:25.341709Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:21:2058] [node 6] ICH04 handshake succeeded 2026-01-10T12:28:25.342016Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-10T12:28:25.342058Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:21:2058] poison: false 2026-01-10T12:28:25.342105Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 PendingConnection -> StateWork 2026-01-10T12:28:25.342273Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:25:2048] 2026-01-10T12:28:25.342332Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:21:2058] self: [5:564823713:0] peer: [6:3314220055:0] socket: 24 qp: -1 2026-01-10T12:28:25.342384Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-10T12:28:25.342463Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-10T12:28:25.342529Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-10T12:28:25.342567Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-10T12:28:25.342608Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-10T12:28:25.342654Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:241: Session [5:25:2048] [node 6] ICS04 subscribe for session state for [5:19:2057] 2026-01-10T12:28:25.342730Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:48680 2026-01-10T12:28:25.343117Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:27:2059] [node 0] ICH02 starting incoming 
handshake 2026-01-10T12:28:25.343234Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:26:2048] [node 6] ICIS01 InputSession created 2026-01-10T12:28:25.343887Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.343992Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.344222Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:23:2058] [node 5] ICH04 handshake succeeded 2026-01-10T12:28:25.344424Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-10T12:28:25.344478Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:23:2058] poison: false 2026-01-10T12:28:25.344513Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 PendingConnection -> StateWork 2026-01-10T12:28:25.344609Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [6:10:2048] [node 5] ICP22 created new session: [6:28:2048] 2026-01-10T12:28:25.344649Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:23:2058] self: [6:3314220055:0] peer: [5:564823713:0] socket: 25 qp: -1 2026-01-10T12:28:25.344690Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-10T12:28:25.344741Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-10T12:28:25.344785Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-10T12:28:25.344812Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-10T12:28:25.344848Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-10T12:28:25.344884Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.344935Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.345004Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:29:2048] [node 5] ICIS01 InputSession created 2026-01-10T12:28:25.345049Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:29:2048] [node 5] ICIS02 ReceiveData called 2026-01-10T12:28:25.345095Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:29:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.345129Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveDa ... 
1 LastInputSerial# 1 2026-01-10T12:28:25.354191Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2026-01-10T12:28:25.354745Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-10T12:28:25.355251Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:31:2059] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\362_\020\253\264\304\235\031xM]0\344\341r\004\207\227\305\351\276W\200\272\0307I\301f\232\253)" 2026-01-10T12:28:25.355317Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:31:2059] [node 6] ICH04 handshake succeeded 2026-01-10T12:28:25.355534Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-10T12:28:25.355604Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:34:2060] poison: true 2026-01-10T12:28:25.355655Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:31:2059] poison: false 2026-01-10T12:28:25.355694Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 StateWork -> StateWork 2026-01-10T12:28:25.355738Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:31:2059] self: [5:564823713:0] peer: [6:3314220055:0] socket: 29 qp: -1 2026-01-10T12:28:25.355791Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-10T12:28:25.355856Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-10T12:28:25.355895Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.355933Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2026-01-10T12:28:25.355992Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.356050Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:48694 2026-01-10T12:28:25.356394Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:38:2062] [node 0] ICH02 starting incoming handshake 2026-01-10T12:28:25.356887Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:37:2048] [node 6] ICIS01 InputSession created 2026-01-10T12:28:25.357873Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:409: Proxy [6:10:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:8985) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:35:2061] held: no 2026-01-10T12:28:25.357921Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [6:10:2048] [node 5] ICP052 dropped outgoing handshake: [6:30:2060] poison: false 2026-01-10T12:28:25.357976Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:432: Proxy [6:10:2048] [node 5] ICP28 other handshake is still going on 2026-01-10T12:28:25.358019Z node 5 
:INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.358089Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.358396Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:35:2061] [node 5] ICH04 handshake succeeded 2026-01-10T12:28:25.358552Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-10T12:28:25.358605Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:35:2061] poison: false 2026-01-10T12:28:25.358647Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 StateWork -> StateWork 2026-01-10T12:28:25.358700Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:35:2061] self: [6:3314220055:0] peer: [5:564823713:0] socket: 31 qp: -1 2026-01-10T12:28:25.358734Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-10T12:28:25.358786Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-10T12:28:25.358817Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.358852Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2026-01-10T12:28:25.358902Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [6:28:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-10T12:28:25.358961Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2026-01-10T12:28:25.358994Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.359069Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:39:2048] [node 5] ICIS01 InputSession created 2026-01-10T12:28:25.359098Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.359138Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359205Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.359241Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359268Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-10T12:28:25.359306Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 
2026-01-10T12:28:25.359362Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359388Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359550Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-10T12:28:25.359577Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359605Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.359645Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.359700Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-10T12:28:25.359750Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.359829Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.359851Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-10T12:28:25.359874Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.359890Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.359921Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:164: Session [6:28:2048] [node 5] ICS02 send event from: [6:20:2057] to: [5:19:2057] 2026-01-10T12:28:25.359985Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:961: Session [6:28:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2026-01-10T12:28:25.360038Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.360105Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.360123Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-10T12:28:25.360165Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-01-10T12:28:25.360206Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2026-01-10T12:28:25.360282Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-10T12:28:25.360323Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-10T12:28:25.360353Z node 5 :INTERCONNECT_SESSION DEBUG: 
interconnect_channel.cpp:62: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2026-01-10T12:28:25.360421Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [5:25:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-10T12:28:25.360463Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-10T12:28:25.360530Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:105: Session [5:25:2048] [node 6] ICS01 socket: 29 reason# 2026-01-10T12:28:25.360572Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:543: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:25:2048] VirtualId# [5:564823713:0] 2026-01-10T12:28:25.360609Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 StateWork -> PendingActivation 2026-01-10T12:28:25.360641Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:571: Session [5:25:2048] [node 6] ICS25 shutdown socket, reason# 2026-01-10T12:28:25.360760Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:461: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2026-01-10T12:28:20.167679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708769920403117:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.167809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.413705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.460285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.460397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.477492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:20.522887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.524279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708769920403090:2081] 1768048100165562 != 1768048100165565 2026-01-10T12:28:20.571575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.726933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:23.214092Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708783600302405:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:23.214181Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:23.228607Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:23.314133Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:23.316343Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708783600302378:2081] 1768048103212663 != 1768048103212666 2026-01-10T12:28:23.351843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:23.351925Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:23.356571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:23.441951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:23.515629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2026-01-10T12:28:20.000643Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708763425761266:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:20.001610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:20.216095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:20.219609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:20.219782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:20.223061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-10T12:28:20.291484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.292789Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708763425761139:2081] 1768048099995285 != 1768048099995288 2026-01-10T12:28:20.496357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:20.502444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:20.502491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:23.080580Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708783070662309:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:23.080731Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:23.097405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:23.182574Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:23.187064Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708783070662282:2081] 1768048103079225 != 1768048103079228 2026-01-10T12:28:23.221465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:23.221566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:23.232763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:23.367411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:23.414511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |95.7%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> KqpEffects::InsertAbort_Params_Success >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpEffects::EmptyUpdate+UseSink >> 
KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpImmediateEffects::Insert >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink >> KqpEffects::AlterAfterUpsertTransaction+UseSink >> KqpEffects::InsertRevert_Literal_Success >> KqpImmediateEffects::UpdateAfterUpsert >> KqpEffects::UpdateOn_Select >> KqpEffects::InsertAbort_Literal_Success |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> KqpImmediateEffects::DeleteAfterUpsert >> KqpInplaceUpdate::SingleRowSimple+UseSink |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |95.7%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::UpsertDuplicates >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> KqpImmediateEffects::UpsertAfterInsert >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2026-01-10T12:27:39.652161Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:39.652306Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:39.673595Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:39.673829Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:39.709522Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:39.710221Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=13589784823502699250, session=0, seqNo=0) 2026-01-10T12:27:39.710421Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:39.723946Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=13589784823502699250, session=1) 2026-01-10T12:27:39.724792Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-10T12:27:39.724962Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:39.725117Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:39.737170Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2026-01-10T12:27:39.737582Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:39.754517Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2026-01-10T12:27:39.755220Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=4737491491845989284, name="Lock1") 2026-01-10T12:27:39.755356Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=4737491491845989284) 2026-01-10T12:27:40.383265Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.383390Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.404642Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.405234Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.444198Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.445261Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=10824813993011097082, session=0, seqNo=0) 2026-01-10T12:27:40.445436Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.458830Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=10824813993011097082, session=1) 2026-01-10T12:27:40.459226Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=14893414350323725601, session=0, seqNo=0) 2026-01-10T12:27:40.459363Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-10T12:27:40.477756Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=14893414350323725601, session=2) 2026-01-10T12:27:40.478890Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-10T12:27:40.479041Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-10T12:27:40.479126Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-10T12:27:40.493624Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-10T12:27:40.494163Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-10T12:27:40.494339Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-10T12:27:40.494432Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-10T12:27:40.506828Z node 2 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=112) 2026-01-10T12:27:40.507300Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:40.507593Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-10T12:27:40.521110Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2026-01-10T12:27:40.521212Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=223) 2026-01-10T12:27:40.521595Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-10T12:27:40.521966Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-10T12:27:40.534050Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=333) 2026-01-10T12:27:40.534130Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=334) 2026-01-10T12:27:40.969625Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:40.983922Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.357434Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.373171Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.738975Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.751867Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.111189Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.123237Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.484260Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.496296Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.842703Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.855126Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.196101Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.209169Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.561295Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.573577Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.937063Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.949361Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.345352Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.358402Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.730838Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.744740Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.108566Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.123825Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.504446Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.520415Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.932491Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.949647Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.356855Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.369300Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.751381Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.765221Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.135915Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.148758Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.515275Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.527944Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.920346Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.933909Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.313935Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.326038Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.713565Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... 
026-01-10T12:28:22.112136Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:22.516169Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:22.528364Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:22.890009Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:22.902094Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:23.268903Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:23.284294Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:23.652694Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:23.665145Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:24.029179Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.041481Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:24.447998Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.460286Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:24.822767Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.835024Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.197985Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.210137Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.550701Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.562870Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.914390Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.926318Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:26.288288Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:26.300680Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:26.652245Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:26.663899Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.006051Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.018287Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.360402Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.372559Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.732541Z node 4 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.751441Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:28.275426Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2026-01-10T12:28:28.275608Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-10T12:28:28.289183Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2026-01-10T12:28:28.311289Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:598:2535], cookie=5960379066004157857) 2026-01-10T12:28:28.311502Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:598:2535], cookie=5960379066004157857) 2026-01-10T12:28:28.312531Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:601:2538], cookie=4513162956493669030) 2026-01-10T12:28:28.312652Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:601:2538], cookie=4513162956493669030) 2026-01-10T12:28:28.313581Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:604:2541], cookie=15736610849660914815, name="Lock1") 2026-01-10T12:28:28.313720Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:604:2541], cookie=15736610849660914815) 2026-01-10T12:28:28.315645Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:607:2544], cookie=15569103764652125306, name="Lock1") 2026-01-10T12:28:28.315764Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:607:2544], cookie=15569103764652125306) 2026-01-10T12:28:28.680014Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:28.680175Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:28.713435Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:28.713879Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:28.746722Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:28.747320Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=10314249876319650969, session=0, seqNo=0) 2026-01-10T12:28:28.747499Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:28:28.763564Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=10314249876319650969, session=1) 2026-01-10T12:28:28.763963Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=6138815880595864588, session=0, seqNo=0) 2026-01-10T12:28:28.764150Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 2 2026-01-10T12:28:28.782059Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=6138815880595864588, session=2) 2026-01-10T12:28:28.782467Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=2837574311769381, session=0, seqNo=0) 2026-01-10T12:28:28.782625Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-10T12:28:28.801252Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=2837574311769381, session=3) 2026-01-10T12:28:28.802029Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=3984913714615148553, name="Sem1", limit=3) 2026-01-10T12:28:28.802234Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-10T12:28:28.817204Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=3984913714615148553) 2026-01-10T12:28:28.817646Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-10T12:28:28.817851Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-10T12:28:28.818078Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=222, session=2, semaphore="Sem1" count=2) 2026-01-10T12:28:28.818367Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-10T12:28:28.831441Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=111) 2026-01-10T12:28:28.831548Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=222) 2026-01-10T12:28:28.831589Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=333) 2026-01-10T12:28:28.832322Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=3172600967837318226, name="Sem1") 2026-01-10T12:28:28.832444Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=3172600967837318226) 2026-01-10T12:28:28.833005Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=16206290429380068129, name="Sem1") 2026-01-10T12:28:28.833096Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=16206290429380068129) 2026-01-10T12:28:28.833401Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2161], cookie=444, name="Sem1") 2026-01-10T12:28:28.833546Z node 5 :KESUS_TABLET 
DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-10T12:28:28.833634Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-10T12:28:28.834091Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-10T12:28:28.847108Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2161], cookie=444) 2026-01-10T12:28:28.847939Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=8226565729200023471, name="Sem1") 2026-01-10T12:28:28.848061Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=8226565729200023471) 2026-01-10T12:28:28.849209Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=10164807100203207546, name="Sem1") 2026-01-10T12:28:28.849330Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=10164807100203207546) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpWrite::CastValues >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpImmediateEffects::InsertDuplicates-UseSink >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::NodeStatusComputer [GOOD] >> TSentinelTests::InitialDeploymentGracePeriod >> KqpOverload::OltpOverloaded-Distributed >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> KqpWrite::ProjectReplace+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2026-01-10T12:28:11.915406Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.915486Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.915576Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:11.915615Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:11.915666Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.915762Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.916749Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN 
Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.922918Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk 
{ NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 
1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... e: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-10T12:28:29.060662Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-10T12:28:29.060823Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-10T12:28:29.060904Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:29.061349Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 8:33, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:29.061415Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 1:6, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:29.061461Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 5:23, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:29.061513Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-10T12:28:29.072006Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 1 2026-01-10T12:28:29.072092Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-10T12:28:29.082863Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:29.082918Z node 1 :CMS 
DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:29.083010Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 2 2026-01-10T12:28:29.083042Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-10T12:28:29.083187Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-10T12:28:29.083275Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-10T12:28:29.083313Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-10T12:28:29.083368Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-10T12:28:29.083399Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-10T12:28:29.083429Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-10T12:28:29.083455Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-10T12:28:29.083481Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-10T12:28:29.083861Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2026-01-10T12:28:29.083899Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-01-10T12:28:29.084322Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.084728Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 
2026-01-10T12:28:29.084922Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085203Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085355Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085538Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085687Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: 
Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085845Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-10T12:28:29.085905Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2026-01-10T12:27:40.579835Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:27:40.579984Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:27:40.601264Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:27:40.601471Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:27:40.636653Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:27:40.637292Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=1324765693637247230, session=0, seqNo=0) 2026-01-10T12:27:40.637461Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:27:40.649754Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=1324765693637247230, session=1) 2026-01-10T12:27:40.650538Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:136:2161], cookie=12083183382036737884 2026-01-10T12:27:40.651002Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:150:2172], cookie=13095085387291212944) 2026-01-10T12:27:40.651085Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:150:2172], cookie=13095085387291212944) 2026-01-10T12:27:41.090782Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.105336Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-10T12:27:41.456409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.468766Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:41.827580Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:41.841175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.198392Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.211695Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.584902Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.597178Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:42.953331Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:42.966390Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.307037Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.318947Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:43.669058Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:43.680966Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.031251Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.043859Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.434208Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.446709Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:44.827187Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:44.839730Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.220468Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.234260Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:45.623017Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:45.640363Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.032436Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.052789Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.463905Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.481009Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:46.873821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:46.888984Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.253265Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.265946Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:47.641634Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:47.654836Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.021878Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.036460Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.449708Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.462877Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:48.846822Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:48.860033Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:49.228388Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:49.240877Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:49.637778Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:49.652295Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.033711Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.049020Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.447821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.462695Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:50.843068Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:50.855780Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:51.252018Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:51.265130Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:51.626531Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:51.640485Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:52.020878Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:52.033742Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:52.465796Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:52.480983Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:52.854555Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2026-01-10T12:27:52.867162Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:53.253739Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:53.266087Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:53.642184Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:53.654028Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:54.013485Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:54.027297Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:54.411798Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:54.424699Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:54.810084Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:54.837136Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:55.208986Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:55.223455Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:55.593029Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:55.606365Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:55.991777Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:56.004245Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:56.446881Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:56.472855Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:56.838387Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:56.851087Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:57.232883Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:57.245271Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:27:57.610821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:27:57.628020Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... 
: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:17.745099Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.096611Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.108815Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.460852Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.472839Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:18.825217Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:18.837257Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.189178Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.201459Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.596141Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.608307Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:19.961692Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:19.973986Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:20.349160Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:20.369238Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:20.725109Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:20.737424Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:21.091688Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:21.103870Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:21.487451Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:21.499225Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:21.861950Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:21.873818Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:22.226721Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:22.239924Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:22.491132Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:22.503270Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:22.857077Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:22.868914Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-10T12:28:23.254713Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:23.267037Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:23.630977Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:23.643186Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:23.995824Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.008328Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:24.362050Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.374136Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:24.726926Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:24.739543Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.092939Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.105389Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.467895Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.480050Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:25.821110Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:25.833376Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:26.185768Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:26.198208Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:26.540484Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:26.552344Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.030184Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.042524Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.406225Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.418748Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:27.789109Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:27.814048Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:28.228508Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:28.245133Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:28.616427Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:28.629012Z node 2 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:29.014971Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:29.028922Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:29.419227Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:29.433349Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:29.831747Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:29.845284Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:30.240445Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:30.254221Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:30.639124Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:28:30.651989Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:28:30.994825Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-10T12:28:30.994924Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:28:31.007474Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-10T12:28:31.018592Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:643:2569], cookie=11847652609613098929) 2026-01-10T12:28:31.018763Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:643:2569], cookie=11847652609613098929) 2026-01-10T12:28:31.385908Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:31.386027Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:31.402007Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:31.402145Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:31.436969Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:31.437906Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-10T12:28:31.438063Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:28:31.450303Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=12345, session=1) 2026-01-10T12:28:31.451101Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-10T12:28:31.463955Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:144:2166], cookie=23456, session=1) 2026-01-10T12:28:31.832159Z node 4 :KESUS_TABLET INFO: 
tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:28:31.832287Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:28:31.852732Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:28:31.852869Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:28:31.867751Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:28:31.868749Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-01-10T12:28:31.868912Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:28:31.893103Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-01-10T12:28:31.893962Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:143:2166], cookie=23456, session=1, seqNo=0) 2026-01-10T12:28:31.906177Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:143:2166], cookie=23456, session=1) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpEffects::EmptyUpdate+UseSink [GOOD] >> KqpEffects::EmptyUpdate-UseSink >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 26532, MsgBus: 19100 2026-01-10T12:26:48.321596Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708375548339934:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:48.323854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:48.576307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:48.576400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:48.639936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:48.698459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:48.716440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:48.778491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:48.778518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:48.778526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:48.778610Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:26:48.921382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:49.249089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:49.254051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:49.326020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.331691Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:49.498633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.652385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:49.724364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:51.600128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708388433243586:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.600258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.600600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708388433243596:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:51.600644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.067197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.105081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.140939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.185689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.223377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.261990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.295975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.334568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:52.423052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708392728211761:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.423149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.423398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708392728211766:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.423437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708392728211767:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.423468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:26:52.427111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:26:52.442220Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708392728211770:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:26:52.544041Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708392728211821:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:26:53.320257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708375548339934:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:53.320337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:26:53.990822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... 41: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:00.778314Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:00.862974Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:00.865886Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708684231152707:2081] 1768048080758936 != 1768048080758939 2026-01-10T12:28:00.901812Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:00.901923Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:00.915357Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:00.961216Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:00.961236Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:00.961243Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:00.961312Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:00.996852Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:01.524238Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:01.542201Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.613871Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.770911Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:01.808717Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:01.912481Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.340220Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708705705991065:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.340331Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.344207Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708705705991075:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.344305Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.437555Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.478545Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.520693Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.561851Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.601925Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.641555Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.686707Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.745170Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:05.798876Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708684231152732:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:05.809011Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:05.849940Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708705705991952:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.850027Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.850145Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708705705991957:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.850238Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708705705991959:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.850287Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:05.854594Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:05.868127Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708705705991961:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:05.970604Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708705705992013:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:15.837660Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:15.837714Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:20.280293Z node 5 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=YjNmMGZiNzYtOTJmYTQyMzMtZGYyZWQwYmUtY2FhODFjOWI= }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=YjNmMGZiNzYtOTJmYTQyMzMtZGYyZWQwYmUtY2FhODFjOWI= |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpEffects::AlterAfterUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction-UseSink >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpEffects::UpdateOn_Select [GOOD] >> KqpFail::Immediate >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpImmediateEffects::UpdateAfterInsert >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KqpImmediateEffects::Upsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] Test command err: 2026-01-10T12:28:12.252306Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:12.252355Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:12.252455Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:12.252479Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:12.252518Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:12.252579Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:12.253213Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } 
Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:12.256636Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" 
Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-10T12:28:32.311050Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-10T12:28:32.311165Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-10T12:28:32.311282Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-10T12:28:32.311334Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:32.322022Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:32.322084Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-10T12:28:32.322183Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-10T12:28:32.322241Z node 9 :CMS DEBUG: 
sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-10T12:28:32.322275Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-10T12:28:32.322315Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-10T12:28:32.322345Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-10T12:28:32.322374Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-10T12:28:32.322402Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-10T12:28:32.322431Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-10T12:28:32.322690Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.323343Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.323493Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 7800110 2026-01-10T12:28:32.323609Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.323744Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.323860Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.323986Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.324109Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-10T12:28:32.324147Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:32.324585Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 11:47, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:32.324663Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-10T12:28:32.324921Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 6 2026-01-10T12:28:32.324963Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 11:47 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink >> KqpEffects::EmptyUpdate-UseSink [GOOD] >> KqpEffects::EffectWithSelect-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 6580, MsgBus: 24537 2026-01-10T12:27:11.709266Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708472119788972:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:11.709317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:12.032311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:12.035751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:12.035835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:12.136728Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:12.164491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:12.244146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:12.254558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:12.254586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:12.254594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:12.254675Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:12.739742Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:12.760444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:12.766003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:12.867992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.343128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489299659398:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.343224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.343496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489299659410:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.343530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708489299659411:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.343556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.346706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:15.356391Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708489299659414:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:27:15.453023Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708489299659465:2813] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:15.809619Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2026-01-10T12:27:15.809677Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2026-01-10T12:27:15.827935Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593708489299659497:2355], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01keky00msd5ggs6d5qdm22het. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZTVkNzBmMjYtZmIxNDdlOTQtMmFhOGE0My05ZGM2OTI1. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-10T12:27:15.749467Z }, code: 2029 }. 2026-01-10T12:27:15.857620Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[1:7593708489299659498:2355], 1} 2026-01-10T12:27:15.857656Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[1:7593708489299659498:2355], 2} 2026-01-10T12:27:15.857679Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037888 Cancelled read: {[1:7593708489299659498:2355], 0} 2026-01-10T12:27:15.857700Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[1:7593708489299659498:2355], 3} 2026-01-10T12:27:15.857720Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[1:7593708489299659498:2355], 4} 2026-01-10T12:27:15.857742Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037893 Cancelled read: {[1:7593708489299659498:2355], 5} 2026-01-10T12:27:15.857763Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037894 Cancelled read: {[1:7593708489299659498:2355], 6} 2026-01-10T12:27:15.857784Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037895 Cancelled read: {[1:7593708489299659498:2355], 7} 2026-01-10T12:27:15.894371Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZTVkNzBmMjYtZmIxNDdlOTQtMmFhOGE0My05ZGM2OTI1, ActorId: [1:7593708489299659396:2355], ActorState: ExecuteState, LegacyTraceId: 01keky00msd5ggs6d5qdm22het, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-10T12:27:15.749467Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-10T12:27:15.749467Z } , code: 2029 query_phases { duration_us: 146440 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 1511 affected_shards: 8 } compilation { duration_us: 289958 cpu_time_us: 283267 } process_cpu_time_us: 739 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048586,\"Max\":1048586,\"Min\":1048586,\"History\":[64,1048586]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1768048035828,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1511,\"Max\":1511,\"Min\":1511,\"History\":[64,1511]},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":289958,\"CpuTimeUs\":283267},\"ProcessCpuTimeUs\":739,\"TotalDurationUs\":553073,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":113298},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node ... 
2026-01-10T12:28:21.275329Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:21.365981Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:21.367300Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708773837453753:2081] 1768048101248985 != 1768048101248988 2026-01-10T12:28:21.389854Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:21.389953Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:21.414435Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:21.459886Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:21.459921Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:21.459932Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:21.460044Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:21.498743Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:21.908645Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:21.928674Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:21.999305Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:22.200159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:22.260094Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:22.293745Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.275881Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708791017324817:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.275991Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.276247Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708791017324827:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.276291Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.337116Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.366300Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.393396Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.421090Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.449004Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.483490Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.544439Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.588021Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:25.651137Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708791017325694:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.651216Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.651286Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708791017325699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.651328Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593708791017325701:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.651360Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:25.654094Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:25.662732Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593708791017325703:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:25.723441Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593708791017325754:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:26.250433Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593708773837453780:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:26.250486Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:27.210403Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.972706Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NDMzNjEyYi0zMDc1Y2U5Mi1hZmEwZGZmYy1kZmIzMTQ0Mg==, ActorId: [5:7593708799607260649:2530], ActorState: ExecuteState, LegacyTraceId: 01keky2b10d8pqz4k3ryjhjbh5, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202524 > 50331648)" issue_code: 2013 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::OutOfSpace >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink >> KqpOverload::OltpOverloaded-Distributed [GOOD] >> KqpReattach::ReattachDeliveryProblem >> KqpEffects::AlterAfterUpsertTransaction-UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpdateOn >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> KqpWrite::CastValuesOptional [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 5964, MsgBus: 62501 2026-01-10T12:28:27.726172Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708798546352873:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.726284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:28.068439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.101795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.101903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.163906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.215020Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.219879Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708798546352846:2081] 1768048107724625 != 1768048107724628 2026-01-10T12:28:28.288524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.288555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.288569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.288665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.290360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.740650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.929024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.953845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:29.043163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.167284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.332021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.412923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.240229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815726223907:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.240392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.240734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815726223917:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.240796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.572217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.600679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.633644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.669965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.702211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.742800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.782902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.856948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.925248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815726224787:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.925345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.925442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815726224792:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.925556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815726224794:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.925602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.929343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.939909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708815726224796:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:32.040459Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708820021192143:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.726936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708798546352873:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.727003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.610733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... B, gRPC: 27356, MsgBus: 64973 2026-01-10T12:28:35.016165Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708831994336021:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:35.016208Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:35.051571Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:35.140230Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:35.142330Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708827699368683:2081] 1768048115014510 != 1768048115014513 2026-01-10T12:28:35.171299Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:35.171386Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:35.183117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:35.330589Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:35.364460Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:35.364491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:35.364499Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:35.364577Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:35.881990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:35.902442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.988976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.125804Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:36.236307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.311729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.537580Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708844879239741:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.537670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.537941Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708844879239751:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.537989Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.615466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.666238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.699654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.737055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.773118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.838821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.872974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.926484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.018499Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708849174207917:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.018611Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.018937Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708849174207922:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.018975Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708849174207923:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.019036Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.031532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.043442Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708849174207926:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.136944Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708849174207979:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:40.016475Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708831994336021:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:40.016549Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:40.676333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11669, MsgBus: 15096 2026-01-10T12:28:27.812056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708797482666640:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.824974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:28.192362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.268132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.268252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.278044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.279614Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708797482666601:2081] 1768048107790874 != 1768048107790877 2026-01-10T12:28:28.282642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.368532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.368567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.368579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.368669Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.402028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.821132Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.930002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.936214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:28.987361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.178569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.344833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.436290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.202050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814662537665:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.202200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.202566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814662537675:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.202638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.608398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.640290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.668429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.700857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.739092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.782798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.832396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.877651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.951898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814662538549:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.951985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.952301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814662538553:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.952344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814662538555:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.952375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.956627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.972061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708814662538558:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:32.045983Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708818957505905:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.793303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708797482666640:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.793388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.710640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 833594848227:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:35.272812Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:35.296899Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:35.396275Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708833594848202:2081] 1768048115272257 != 1768048115272260 2026-01-10T12:28:35.405150Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:35.406208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:35.406279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:35.418458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:35.534681Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:35.535189Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:35.535197Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:35.535204Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:35.535271Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:36.004293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:36.011275Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:36.019088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.111991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.289194Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:36.293653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.360718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.705041Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708846479751962:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.705149Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.705817Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708846479751972:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.705899Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.782247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.817338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.851793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.883756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.915917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.968239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.010643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.069345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.153706Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850774720138:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.153805Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.153892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850774720143:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.153935Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850774720145:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.153981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.157209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.170878Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708850774720147:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.237372Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708850774720198:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:40.273121Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708833594848227:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:40.273224Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:41.129697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::ManyFlushes >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 5753, MsgBus: 3508 2026-01-10T12:28:28.579803Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708803063344461:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:28.579961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:28.888196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.899865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.899983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.926744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:29.028156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:29.144647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:29.148498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:29.148528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2026-01-10T12:28:29.148537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:29.148637Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:29.579332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:29.657311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.663046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:29.710522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.844469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.010281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.089429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.063392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820243215330:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.063510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.063876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820243215340:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.063928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.452035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.486439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.516177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.559961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.588886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.622910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.664739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.718632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.798218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820243216212:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.798306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.798377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820243216217:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.798819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820243216220:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.798862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.802037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:32.811694Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708820243216219:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:32.916744Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708820243216272:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:33.577906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708803063344461:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:33.578008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:34.405274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 14678, MsgBus: 3900 2026-01-10T12:28:35.947802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:35.948108Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:36.087993Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:36.116750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:36.116845Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:36.146902Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:36.240811Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:36.240837Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:36.240853Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:36.240948Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:36.243168Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:36.625134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:36.644577Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.708916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.868881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.936052Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:36.945457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.359415Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850801373607:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.359528Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.359823Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850801373616:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.359874Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.428119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.460586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.491700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.525179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.554778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.589785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.661610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.714125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.801217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850801374487:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.801310Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.801638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850801374492:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.801712Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708850801374493:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.801793Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.806718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.818324Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708850801374496:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.904002Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708850801374547:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:41.521406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 21525, MsgBus: 28436 2026-01-10T12:28:29.862623Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708808520283506:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:29.863122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:30.068650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:30.099291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:30.099392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:30.149537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708808520283464:2081] 1768048109860974 != 1768048109860977 2026-01-10T12:28:30.157988Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:30.164000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:30.264701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:30.264723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:30.264735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:30.264831Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:30.269138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:30.656376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:30.712330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.820155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.915440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:30.978217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.059651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.135530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825700154519:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.135679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.136117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825700154529:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.136169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.519463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.559335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.592203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.623585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.660245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.732120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.803361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.846572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.914847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825700155404:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.914915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.914936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825700155409:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.915095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825700155411:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.915143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.918565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:33.930388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708825700155412:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:33.998875Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708825700155466:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:34.868286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708808520283506:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.869604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31872, MsgBus: 6434 2026-01-10T12:28:36.929596Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708839973700116:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:36.929734Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:37.012649Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:37.037620Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:37.038661Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708839973700081:2081] 1768048116928729 != 1768048116928732 2026-01-10T12:28:37.067212Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:37.067302Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:37.078405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:37.140610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:37.140633Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:37.140640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:37.140722Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:37.278792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:37.533005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:28:37.552661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.613401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.731588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.803683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.943587Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:40.185349Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708857153571146:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.185419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.185595Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708857153571155:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.185641Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.259355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.290645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.323670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.364317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.390844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.423227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.487561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.547859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.640118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708857153572024:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.640193Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.640231Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708857153572029:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.640423Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708857153572031:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.640494Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.643691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:40.653804Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708857153572032:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:40.714713Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708857153572084:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:41.929996Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708839973700116:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:41.930132Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::BigRow |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3090, MsgBus: 11682 2026-01-10T12:28:29.743268Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708809758766532:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:29.743841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:29.975880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:29.983203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:29.983318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:30.011019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:30.050144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:30.158395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:30.173566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:30.173583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:30.173589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-10T12:28:30.173655Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:30.582632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:30.588488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:30.633162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.749102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:30.752317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.903355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.976750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.048405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826938637558:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.048520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.048877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826938637568:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.048933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.384567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.415269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.448975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.484308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.521353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.597480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.633871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.689607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.777676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826938638441:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.777789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.777893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826938638446:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.777951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826938638448:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.777997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.781467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:33.791898Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708826938638450:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:33.883859Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708826938638501:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:34.742111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708809758766532:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.743527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:35.786645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:37.502944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:37.580224Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:37.581399Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708842515059829:2081] 1768048117490288 != 1768048117490291 2026-01-10T12:28:37.598043Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:37.621495Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:37.621574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:37.627305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:37.676237Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:37.676276Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:37.676283Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:37.676362Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:37.870921Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:38.088801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:38.094358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:38.104856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.168434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.366918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.427941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.541777Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:40.777095Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708855399963590:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.777225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.777541Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708855399963600:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.777617Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.840909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.879501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.908885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.941153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.971934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.005936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.075070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.126840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.205053Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708859694931766:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.205150Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.205223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708859694931771:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.205266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708859694931773:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.205301Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.209008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:41.219408Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708859694931775:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:41.322060Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708859694931826:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:42.496190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708842515059862:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:42.496285Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:42.786031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskRackGuardFullRack [GOOD] Test command err: 2026-01-10T12:28:11.917215Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-10T12:28:11.917269Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-10T12:28:11.917316Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-10T12:28:11.917346Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-10T12:28:11.917385Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-10T12:28:11.917439Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-10T12:28:11.918615Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: 
DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } Hosts { Name: "node-9" State: UNKNOWN Devices { Name: "pdisk-9-36" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-37" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-38" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-39" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 9 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-10" State: UNKNOWN Devices { Name: "pdisk-10-40" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-41" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-42" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-43" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 10 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-11" State: UNKNOWN Devices { Name: "pdisk-11-44" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-45" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-46" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-47" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 11 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-12" State: UNKNOWN Devices { Name: "pdisk-12-48" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-49" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-50" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-51" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 12 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-13" State: UNKNOWN Devices { Name: "pdisk-13-52" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-53" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-54" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-55" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 13 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-14" State: UNKNOWN Devices { Name: "pdisk-14-56" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-57" 
State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-58" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-59" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 14 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-15" State: UNKNOWN Devices { Name: "pdisk-15-60" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-61" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-62" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-63" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 15 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-16" State: UNKNOWN Devices { Name: "pdisk-16-64" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-65" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-66" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-67" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 16 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-10T12:28:11.925251Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 
PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" G ... 
data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.184839Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-92.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-93.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-94.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-95.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.184959Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-96.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-97.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-98.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-99.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185082Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-100.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-101.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 102 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-102.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 103 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-103.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185202Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-104.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-105.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-106.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-107.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185319Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# 
PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-108.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-109.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-110.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-111.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185438Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185560Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 76 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-76.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 77 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-77.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 78 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-78.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 79 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-79.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185686Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 80 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-80.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 81 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-81.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-82.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-83.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185824Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-84.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-85.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-86.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-87.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.185946Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 72 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-72.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 73 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-73.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 74 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-74.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 75 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-75.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-10T12:28:41.186016Z node 17 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-10T12:28:41.186763Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 31:124, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186826Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 31:125, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186860Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 31:126, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186890Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 31:127, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186921Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 23:92, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186952Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 23:93, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.186984Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 23:94, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.187015Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 23:95, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-10T12:28:41.187053Z node 17 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2026-01-10T12:28:41.187425Z node 17 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle 
TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 20 2026-01-10T12:28:41.187462Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:92 2026-01-10T12:28:41.187491Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:93 2026-01-10T12:28:41.187516Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:94 2026-01-10T12:28:41.187539Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:95 2026-01-10T12:28:41.187562Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:124 2026-01-10T12:28:41.187587Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:125 2026-01-10T12:28:41.187612Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:126 2026-01-10T12:28:41.187637Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:127 >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink >> KqpImmediateEffects::ReplaceExistingKey >> KqpFail::Immediate [GOOD] >> KqpFail::OnCommit |95.7%| [TA] $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpEffects::InsertAbort_Select_Success >> KqpImmediateEffects::Replace |95.7%| [TA] {RESULT} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 16687, MsgBus: 9296 2026-01-10T12:28:29.170554Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708808071643750:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:29.179104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:29.235397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:29.501237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:29.515812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:29.515952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:29.579632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:29.590243Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:29.591323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708808071643711:2081] 1768048109168464 != 1768048109168467 2026-01-10T12:28:29.719895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:29.719921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:29.719933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:29.720031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:29.739258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:30.181311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:30.191661Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:30.239366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.357436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.502636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.562230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.260340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820956547473:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.260418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.260637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820956547483:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.260691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.593979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.628046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.663284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.698681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.734867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.777277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.812533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.879739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:32.945656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820956548353:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.945735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820956548358:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.945747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.945990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708820956548360:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.946032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:32.949848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:32.962822Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708820956548361:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:33.047709Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708825251515709:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:34.170190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708808071643750:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.170280Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:34.482816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... ecution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:36.012782Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:36.016409Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708835580131308:2081] 1768048115890885 != 1768048115890888 2026-01-10T12:28:36.047415Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:36.239086Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:36.287270Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:36.287298Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:36.287306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:36.287381Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:36.663478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:36.668806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:36.680227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.744366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.936423Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:36.942573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.004772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.284753Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708852760002358:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.284849Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.285097Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708852760002368:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.285144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.355029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.386278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.420641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.458817Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.502494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.552455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.602745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.657884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.746572Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708852760003237:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.746652Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.746735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708852760003242:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.746852Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708852760003244:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.746907Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.752550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.765138Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708852760003246:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:39.819373Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708852760003297:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:40.892199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708835580131332:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:40.892275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:41.458119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.497053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.535389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> KqpWrite::OutOfSpace [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7217, MsgBus: 13404 2026-01-10T12:28:27.694536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708798054757477:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.695147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.955979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.956112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:27.964100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.059930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.084282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.276797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.340574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.696827Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.880944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.975527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.107576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.284645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.374800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.947696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708810939661203:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.947814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.948335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708810939661213:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.948385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.460101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.492610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.527736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.562876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.597403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.642154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.685968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.771875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.849666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815234629382:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.849774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.849957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815234629388:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.850002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815234629387:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.850052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.853386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.862918Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708815234629391:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.954630Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708815234629444:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.693518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708798054757477:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.693594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.371521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.748966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpA ... execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:41.890303Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:41.931423Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:41.931526Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:41.935626Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:41.999756Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:41.999780Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:41.999791Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:41.999876Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.026768Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.473807Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:42.487660Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.552184Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.708216Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.809534Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:42.816261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.230183Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878599406364:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.230274Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.230715Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878599406373:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.230776Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.316431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.347720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.379319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.408844Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.440760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.474962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.509248Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.562806Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.672258Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878599407242:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.672359Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.672704Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878599407248:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.672762Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878599407247:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.672872Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.675889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:45.685187Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708878599407251:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:45.758444Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708878599407302:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:46.805344Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708861419535346:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:46.805443Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:47.467505Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.744881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:28:47.928888Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NzNlNzMxMzQtZWI4YmZhMGItYTgyYTg1MmYtZTRiNDdkYjQ=, ActorId: [3:7593708887189342158:2521], ActorState: ExecuteState, LegacyTraceId: 01keky2vxmaph11ar39qebzs5v, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: Trying to start YDB, gRPC: 22401, MsgBus: 19365 2026-01-10T12:28:27.638921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708799932025030:2206];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.639187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.932164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.944483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.944595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.001714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.124137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.126867Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708799932024843:2081] 1768048107627884 != 1768048107627887 2026-01-10T12:28:28.144726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.644824Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.999294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.005977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:29.063027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.223212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.389275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.466765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.113187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817111895901:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.113320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.113637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817111895911:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.113723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.454564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.492625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.523643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.552906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.586541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.642054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.677572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.735585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.809347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817111896792:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.809440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.809945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817111896798:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.809993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817111896797:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.810029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.814754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.827551Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708817111896801:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.888290Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708817111896852:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.637708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708799932025030:2206];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.637780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.617375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... ng to start YDB, gRPC: 14763, MsgBus: 61168 2026-01-10T12:28:42.151636Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708862389395616:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:42.151907Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:42.293553Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:42.294633Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:42.315210Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:42.315319Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:42.334971Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:42.423657Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:42.423680Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.423688Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.423767Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.557438Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.978056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:42.982759Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:42.990753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.043489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.163289Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.194654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.319552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.682005Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708875274299327:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.682151Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.682522Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708875274299337:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.682578Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.735871Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.765839Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.798254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.831884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.868310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.905537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.938502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.990933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.096626Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879569267503:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.096721Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.096767Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879569267508:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.096889Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879569267510:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.096949Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.100587Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.111634Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708879569267512:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:46.177628Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708879569267565:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.152291Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708862389395616:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.152395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:47.883935Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8788, MsgBus: 13677 2026-01-10T12:28:27.654677Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708797660215726:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.654811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.931272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.957188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.957332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.017523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.020936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.191907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276297Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:28:28.662578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.944808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.003409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.172336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.345439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.417285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.069353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814840086656:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.069562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.070068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814840086666:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.070117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.484556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.515349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.543989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.573980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.606959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.649693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.729222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.778048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.862433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814840087539:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.862537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.862909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814840087545:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.862918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814840087544:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.863012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.866790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.877457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708814840087548:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.939516Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708814840087599:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.653958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708797660215726:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.654027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.676671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.973214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpA ... execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:42.125413Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:42.155088Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:42.155171Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:42.166674Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:42.222183Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:42.222205Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.222211Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.222297Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.332438Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.725212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:42.745287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.811174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.022893Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.066234Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.100124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.437044Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878638804229:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.437148Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.437453Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878638804239:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.437527Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.526337Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.562848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.607476Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.643024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.678951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.716973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.749138Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.799076Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.887675Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878638805105:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.887787Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.887850Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878638805110:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.888062Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878638805112:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.888176Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.891504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:45.901489Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708878638805113:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:45.958288Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708878638805165:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.028395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708865753900523:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.028496Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:47.991766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.258774Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-10T12:28:48.478285Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ODEzZjYyODMtNzQxN2ZlZGQtZjlhYzgyYi03MDgyMjYwNw==, ActorId: [3:7593708887228740021:2521], ActorState: ExecuteState, LegacyTraceId: 01keky2w8tdk92jjss8sjbhkxs, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Tables: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 
65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 5963, MsgBus: 30468 2026-01-10T12:28:27.652671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708798976396691:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.652810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.904529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.935603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.935685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.062630Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708798976396667:2081] 1768048107649211 != 1768048107649214 2026-01-10T12:28:28.080059Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.085922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.191545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.668257Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.954410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.020167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.200056Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.375003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.461047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.140977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816156267725:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.141105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.141389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816156267735:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.141440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.454733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.486608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.518224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.552994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.587772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.627033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.667921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.746301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.832049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816156268603:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.832141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.832795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816156268608:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.832840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816156268609:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.832932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.837086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.847072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708816156268612:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.907553Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708816156268663:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.650669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708798976396691:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.650746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1306, MsgBus: 2531 2026-01-10T12:28:34.945139Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708829077517551:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.948706Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snap ... DATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7446, MsgBus: 16620 2026-01-10T12:28:42.122638Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708865215801202:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:42.122695Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:42.176350Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:42.223157Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708865215801179:2081] 1768048122121946 != 1768048122121949 2026-01-10T12:28:42.252622Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:42.304918Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:42.305006Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:42.308640Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:42.363312Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:42.363334Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.363343Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.363426Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.368752Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.849549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:42.855185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:42.864547Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.935553Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.112241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.148504Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.176702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.659944Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878100704936:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.660035Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.660275Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708878100704946:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.660321Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.731935Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.763693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.795432Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.832253Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.862763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.899706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.946155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.994517Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.084447Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708882395673110:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.084535Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.084759Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708882395673115:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.084781Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708882395673116:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.084821Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.089346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.101097Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708882395673119:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:46.180572Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708882395673172:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.122859Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708865215801202:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.122963Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 15315, MsgBus: 3978 2026-01-10T12:28:35.170769Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708833618740992:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:35.173355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:35.479780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:35.479902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:35.480287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:35.512939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:35.575134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:35.685432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:35.719162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:35.719193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:35.719200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:35.719283Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:36.157521Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:36.223955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:36.230520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:36.303001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.438339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.599931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.665781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.601034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708846503644703:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.601176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.601577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708846503644713:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.601619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.975028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.007962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.043405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.072882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.106892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.147017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.194371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.239948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.329573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708850798612890:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.329663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.329975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708850798612895:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.330010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708850798612896:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.330034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.334186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.347268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708850798612899:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.420206Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708850798612950:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:40.170942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708833618740992:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:40.170999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:40.967191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 864909869525:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:42.646226Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:42.688254Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:42.771791Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:42.776255Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708864909869498:2081] 1768048122644831 != 1768048122644834 2026-01-10T12:28:42.790576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:42.790664Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:42.814766Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:42.900159Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.912683Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:42.912705Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.912711Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.912786Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:43.297522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:43.303116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:43.309389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.379501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.520214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.581670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.685739Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:45.724260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708877794773254:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.724347Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.724769Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708877794773263:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.724821Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.821529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.855618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.890016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.919050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.955649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.992307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.024840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.079004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.167629Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708882089741436:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.167752Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.167896Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708882089741441:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.167979Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708882089741443:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.168049Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.171252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.182777Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708882089741445:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:46.239544Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708882089741496:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.646212Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708864909869525:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.646318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:47.844384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::EffectWithSelect+UseSink >> KqpEffects::EffectWithSelect-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28395, MsgBus: 65343 2026-01-10T12:28:27.693331Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708797894581334:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.696131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.747749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:28.040149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.062969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.063079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.139063Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.145232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.244349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.708346Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.939551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.952783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:29.019374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.171661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.331895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.420315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.859978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708810779484951:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.860134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.860541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708810779484960:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.860669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.454488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.484383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.517624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.552603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.583180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.627897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.687532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.759889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.851665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815074453134:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.851752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815074453139:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708815074453140:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.855082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.865730Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708815074453143:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.947006Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708815074453194:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.694020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708797894581334:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.694085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31004, MsgBus: 6879 2026-01-10T12:28:34.722913Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists ... 0T12:28:42.784796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.849248Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.027535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.107329Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.118819Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:45.923923Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708875784883528:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.924006Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.924407Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708875784883537:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.924488Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.006480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.041120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.076217Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.114606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.189362Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.237851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.283635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.337077Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.416645Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880079851703:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.416726Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.416728Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880079851708:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.416931Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880079851710:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.416977Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.420264Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.430228Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708880079851711:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:46.490175Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708880079851763:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.110860Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708862899979806:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.110949Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:48.405643Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-10T12:28:48.405888Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:28:48.406037Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:28:48.406231Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7593708888669786698:2533], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [3:7593708888669786667:2533]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[3:7593708888669786698:2533].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:28:48.406328Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593708888669786692:2533], SessionActorId: [3:7593708888669786667:2533], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7593708888669786667:2533]. 2026-01-10T12:28:48.406604Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=OGJjNjA3YzQtODIwNmFlZTgtZDkzNjE3NTEtOWNiNWI1Zjc=, ActorId: [3:7593708888669786667:2533], ActorState: ExecuteState, LegacyTraceId: 01keky2w80bd4csgrq3yyskv35, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7593708888669786693:2533] from: [3:7593708888669786692:2533] trace_id# 2026-01-10T12:28:48.406723Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593708888669786693:2533] TxId: 281474976710673. Ctx: { TraceId: 01keky2w80bd4csgrq3yyskv35, Database: /Root, SessionId: ydb://session/3?node_id=3&id=OGJjNjA3YzQtODIwNmFlZTgtZDkzNjE3NTEtOWNiNWI1Zjc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:28:48.407057Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=OGJjNjA3YzQtODIwNmFlZTgtZDkzNjE3NTEtOWNiNWI1Zjc=, ActorId: [3:7593708888669786667:2533], ActorState: ExecuteState, LegacyTraceId: 01keky2w80bd4csgrq3yyskv35, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TwoShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::OutOfSpace [GOOD] Test command err: Trying to start YDB, gRPC: 17150, MsgBus: 1466 2026-01-10T12:28:32.325973Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708822383985817:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.326198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:32.561600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:32.568815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:32.568934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:32.629938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:32.640790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708822383985781:2081] 1768048112324923 != 1768048112324926 2026-01-10T12:28:32.642659Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:32.736009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:32.754893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:32.754913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:32.754920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:32.755028Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:33.166075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:33.229776Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.334160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:33.352529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.532979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.608716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.502025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708835268889544:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:35.502123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:35.502593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708835268889554:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:35.502645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:35.900578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.940466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.986098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.030715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.097792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.172265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.226592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.280589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.367327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708839563857722:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.367401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.367802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708839563857725:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.367893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.368211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708839563857729:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.377882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:36.389465Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708839563857731:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:36.473561Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708839563857782:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:37.326215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708822383985817:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:37.326286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8810, MsgBus: 2047 ... waiting for SysViewsRoster update finished 2026-01-10T12:28:41.989241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:42.000319Z node ... node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:42.507183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:42.866218Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:42.866279Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.866317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.866745Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.878912Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.353963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:43.398962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.656908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.030935Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.348286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.892212Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1908:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.892370Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.893715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1981:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.893845Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.925180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.139592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.439233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.689261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.935282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.220449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.516778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.833858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.226560Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2794:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.226680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.227099Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2798:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.227172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.227246Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2801:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.233396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:47.398144Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2803:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:47.458418Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2863:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_DISK_GROUP_OUT_OF_SPACE;details=;tx_id=3; 2026-01-10T12:28:49.200334Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [2:3172:4429], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [2:3121:4429]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:3172:4429]. Ignored this error.{
: Error: , code: 2006 } 2026-01-10T12:28:49.200982Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3166:4429], SessionActorId: [2:3121:4429], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036
: Error: , code: 2006 . sessionActorId=[2:3121:4429]. 2026-01-10T12:28:49.201458Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=ZWE4ZWQzMmYtMjZiOTk2ODUtNWU1MWQ5ZjYtYjY4NTNhYzY=, ActorId: [2:3121:4429], ActorState: ExecuteState, LegacyTraceId: 01keky2x27aaa73vv0c830xsxy, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [2:3167:4429] from: [2:3166:4429] trace_id# 2026-01-10T12:28:49.201657Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:3167:4429] TxId: 281474976715673. Ctx: { TraceId: 01keky2x27aaa73vv0c830xsxy, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE4ZWQzMmYtMjZiOTk2ODUtNWU1MWQ5ZjYtYjY4NTNhYzY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036 subissue: {
: Error: , code: 2006 } } trace_id# 2026-01-10T12:28:49.202077Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZWE4ZWQzMmYtMjZiOTk2ODUtNWU1MWQ5ZjYtYjY4NTNhYzY=, ActorId: [2:3121:4429], ActorState: ExecuteState, LegacyTraceId: 01keky2x27aaa73vv0c830xsxy, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`." issue_code: 2036 severity: 1 issues { issue_code: 2006 severity: 1 } } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 19565, MsgBus: 1192 2026-01-10T12:28:27.630110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708797715946441:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.630209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.905841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.925904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.925996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.016826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.076254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.104171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.639062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.974651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.032406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.151026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.310800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.385920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.113887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814895817455:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.114006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.114363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814895817465:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.114501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.527689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.563187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.600732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.634111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.669432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.727140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.760678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.806354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.873795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814895818341:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.873872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.873877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814895818346:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.874023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814895818348:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.874056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.877114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.887470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708814895818349:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.972598Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708814895818401:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.630531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708797715946441:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.630594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.384456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 9054, MsgBus: 62821 2026-01-10T12:28:35.079323Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivere ... hard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.183882Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.198814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.279104Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.128303Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880684137585:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.128417Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.128753Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880684137595:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.128810Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.214440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.251008Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.285971Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.321550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.352486Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.391201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.427436Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.487913Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.576151Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880684138464:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.576240Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.576438Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880684138470:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.576490Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.576490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880684138469:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.583553Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.600150Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708880684138473:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:46.693636Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708880684138524:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.171881Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708863504266565:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.171954Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:48.519009Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.813044Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2026-01-10T12:28:48.813279Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:28:48.813385Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:28:48.813579Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7593708889274073646:2533], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7593708889274073439:2533]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7593708889274073646:2533].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:28:48.813655Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593708889274073638:2533], SessionActorId: [3:7593708889274073439:2533], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7593708889274073439:2533]. 2026-01-10T12:28:48.813908Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MjM2YzMzY2MtZmQ0YWFkMjItNTZmZDMxZTQtOTEzMTgwYzU=, ActorId: [3:7593708889274073439:2533], ActorState: ExecuteState, LegacyTraceId: 01keky2wns887nvbw96y5hkrgt, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7593708889274073639:2533] from: [3:7593708889274073638:2533] trace_id# 2026-01-10T12:28:48.814016Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593708889274073639:2533] TxId: 281474976715675. Ctx: { TraceId: 01keky2wns887nvbw96y5hkrgt, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MjM2YzMzY2MtZmQ0YWFkMjItNTZmZDMxZTQtOTEzMTgwYzU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:28:48.814276Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MjM2YzMzY2MtZmQ0YWFkMjItNTZmZDMxZTQtOTEzMTgwYzU=, ActorId: [3:7593708889274073439:2533], ActorState: ExecuteState, LegacyTraceId: 01keky2wns887nvbw96y5hkrgt, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestImmediateEffects`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 20456, MsgBus: 28155 2026-01-10T12:28:27.629986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708800477556773:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.630060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.944156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.045514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.048164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708800477556734:2081] 1768048107625675 != 1768048107625678 2026-01-10T12:28:28.063127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.063242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.065845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.186630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.640896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.895291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.901386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:28.957382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.103549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.301025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.378819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.161654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817657427794:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.161793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.162180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817657427804:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.162242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.606037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.650043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.686677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.725830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.762850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.825717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.854468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.895978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.966901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817657428676:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.967009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.967308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817657428682:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.967417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817657428680:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.967503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.971565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.986438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708817657428685:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:32.065857Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708821952396032:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.629943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708800477556773:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.630027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.414121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... configuration 2026-01-10T12:28:42.562340Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:43.030148Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:43.036009Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:43.049838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.099925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.252537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.310132Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:43.318315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:28:46.154831Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879356142255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.154908Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.155328Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879356142264:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.155381Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.237588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.273561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.314551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.348266Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.384837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.420031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.456546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.534890Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.616943Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879356143138:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.617062Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.617827Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879356143144:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.617838Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708879356143143:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.617876Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.621581Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.635784Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708879356143147:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:46.716876Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708879356143198:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:47.291517Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708862176271242:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.291588Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:48.414829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.126206Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7593708892241045674:2535], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01keky2wzy7bpwm0sdb4nqn9z3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZmU1OGVjY2QtMzg0ZmY5ZS0yN2RkNGEyMS0yNGZiMTU0NA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-10T12:28:49.126298Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593708892241045674:2535], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01keky2wzy7bpwm0sdb4nqn9z3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZmU1OGVjY2QtMzg0ZmY5ZS0yN2RkNGEyMS0yNGZiMTU0NA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-10T12:28:49.126992Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZmU1OGVjY2QtMzg0ZmY5ZS0yN2RkNGEyMS0yNGZiMTU0NA==, ActorId: [3:7593708887946078104:2535], ActorState: ExecuteState, LegacyTraceId: 01keky2wzy7bpwm0sdb4nqn9z3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11661, MsgBus: 9693 2026-01-10T12:28:27.634753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708797602075882:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.634821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.964869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.965009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.029610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.058321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.067386Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.229079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.275777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.275817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.275824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.275910Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.642783Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.939616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.013524Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.174845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.364015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.437191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.008964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814781946880:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.009088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.009448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814781946890:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.009542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.454440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.483805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.511876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.543808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.578201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.630412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.677316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.724471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.805005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814781947764:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.805100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.805400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814781947769:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.805445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708814781947770:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.805559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.809322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.824742Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708814781947773:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.899547Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708814781947824:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.635352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708797602075882:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.635416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19168, MsgBus: 7641 2026-01-10T12:28:34.736356Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708830634904817:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.736634Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:34.760192Z node 2 :KQP_PROXY WARN: kqp_final ... ed, will use file: (empty maybe) 2026-01-10T12:28:42.056685Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:42.056695Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:42.056765Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:42.077023Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:42.621552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:42.633888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.688760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.851080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.901976Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:42.930927Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.711113Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708876470032571:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.711196Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.711684Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708876470032580:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.711735Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:45.786004Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.820177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.852601Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.890009Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.932732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.971351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.006719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.057449Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.139994Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880765000747:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.140114Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.140146Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880765000752:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.140362Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708880765000754:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.140411Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:46.143983Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:46.153955Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708880765000755:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:46.258720Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708880765000807:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:46.894708Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708859290161552:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:46.894785Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:48.170665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.698585Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593708889354935817:2550], TxId: 281474976710677, task: 1. Ctx: { TraceId : 01keky2wat5ftx9c8mmkb8q0b4. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzE0YmY1Ni1hYmRmZGY3MS0yYmE2ODMwMS0xNzRhYzQzNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:28:48.698986Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593708889354935818:2551], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01keky2wat5ftx9c8mmkb8q0b4. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzE0YmY1Ni1hYmRmZGY3MS0yYmE2ODMwMS0xNzRhYzQzNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593708889354935814:2523], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:28:48.699456Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NzE0YmY1Ni1hYmRmZGY3MS0yYmE2ODMwMS0xNzRhYzQzNQ==, ActorId: [3:7593708889354935688:2523], ActorState: ExecuteState, LegacyTraceId: 01keky2wat5ftx9c8mmkb8q0b4, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpInplaceUpdate::BigRow [GOOD] |95.7%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ManyFlushes [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> KqpReattach::ReattachDeliveryProblem [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10592, MsgBus: 15858 2026-01-10T12:28:27.631057Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708799480148885:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.631132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.895712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.971283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.971359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.020185Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708799480148846:2081] 1768048107626000 != 1768048107626003 2026-01-10T12:28:28.024351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.029738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.120161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.275676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.275808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.275816Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.275925Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.643020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:29.005990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:31.066426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816660018921:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.066531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.067086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708816660018931:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.067186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.455094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.473880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:7593708816660018971:2525], Recipient [1:7593708816660018979:2331]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:28:31.474865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:7593708816660018971:2525], Recipient [1:7593708816660018979:2331]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:28:31.475202Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7593708816660018979:2331] 2026-01-10T12:28:31.475544Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:28:31.487172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:7593708816660018971:2525], Recipient [1:7593708816660018979:2331]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:28:31.487333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:28:31.487386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:28:31.489345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:28:31.489399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:28:31.489448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:28:31.489854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:28:31.489920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:28:31.489974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7593708816660018994:2331] in generation 1 2026-01-10T12:28:31.491115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:28:31.539146Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:28:31.539279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:28:31.539329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7593708816660018996:2332] 2026-01-10T12:28:31.539342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:28:31.539354Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:28:31.539366Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:28:31.539513Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:7593708816660018979:2331], Recipient [1:7593708816660018979:2331]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:28:31.539557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:28:31.539692Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:28:31.539769Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:28:31.539845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:7593708816660018981:2530], Recipient [1:7593708816660018979:2331]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:28:31.539871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:28:31.539892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7593708816660018977:2529], serverId# [1:7593708816660018981:2530], sessionId# [0:0:0] 2026-01-10T12:28:31.539939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:7593708799480149193:2142], Recipient [1:7593708816660018981:2530] 2026-01-10T12:28:31.539959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:28:31.540034Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:28:31.540052Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:28:31.540137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:28:31.540181Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:28:31.540200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:28:31.540210Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:28:31.540231Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:28:31.540253Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:28:31.540488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715658] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:28:31.540534Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:28:31.540615Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-10T12:28:31.540654Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715658] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:28:31.540669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for 
[0:281474976715658] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:28:31.540684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715658] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-10T12:28:31.540694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715658] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:28:31.540975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715658] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:28:31.541001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715658] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:28:31.541014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715658] at 72075186224037888 to execut ... 46131:7762515]; 2026-01-10T12:28:38.226293Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:38.250274Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:38.349349Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:38.350496Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:38.350509Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708847837985748:2081] 1768048118225218 != 1768048118225221 2026-01-10T12:28:38.350564Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:38.350582Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:38.382061Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:38.444629Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:38.444655Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:38.444663Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:38.444739Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:38.558353Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:38.881838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:38.902852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.969160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.128020Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.191991Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.348475Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:41.516286Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708860722889511:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.516375Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.516596Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708860722889521:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.516642Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.621776Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.691865Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.724099Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.759095Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.790012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.828121Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.863435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.908702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.994687Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708860722890392:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.994790Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.995021Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708860722890397:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.995046Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708860722890398:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.995088Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.999298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:42.012403Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708860722890401:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:42.089502Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708865017857748:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:43.226295Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708847837985783:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:43.226367Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:43.978763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 9708, MsgBus: 26937 2026-01-10T12:28:37.743285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708844234339884:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:37.743436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:37.948225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:37.996108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:37.996228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:38.040248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:38.068275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708844234339860:2081] 1768048117742132 != 1768048117742135 2026-01-10T12:28:38.081471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:38.181102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:38.194480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:38.194514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-01-10T12:28:38.194522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:38.194615Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:38.708943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:38.714356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:38.760975Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:38.772742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.933768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.103782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.175337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.928904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708857119243630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.929048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.929400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708857119243640:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.929506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.271226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.302018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.329644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.355994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.384390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.416737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.448799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.522426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.603360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708861414211803:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.603456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.603629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708861414211809:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.603632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708861414211808:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.603677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:41.606995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:41.616458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708861414211812:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:41.680525Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708861414211863:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:42.749750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708844234339884:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:42.749856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:43.317320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... eTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 62660, MsgBus: 12174 2026-01-10T12:28:45.116054Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708877776312761:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:45.116171Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:45.131196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:45.203173Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:45.249150Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:45.249251Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:45.257328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:45.308846Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:45.308869Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:45.308876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:45.308949Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:45.387839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:45.681172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:45.703176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.759408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.881178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.946363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.123145Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:48.453110Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708890661216485:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.453225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.453526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708890661216495:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.453605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.532461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.566745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.596833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.628225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.659694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.706853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.751207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.809077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.918996Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708890661217369:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.919118Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.920260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708890661217374:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.920325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708890661217375:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.920444Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.925613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:48.936865Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708890661217378:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:49.000648Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708890661217429:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:50.116167Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708877776312761:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:50.116256Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:50.618693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 1459, MsgBus: 7670 2026-01-10T12:28:29.960407Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708808204484826:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:29.960929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:30.208185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:30.295513Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708808204484798:2081] 1768048109958708 != 1768048109958711 2026-01-10T12:28:30.307512Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:30.309176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:30.309274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:30.317217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:30.417033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:30.417057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:30.417066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:30.417161Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-10T12:28:30.466952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:30.814938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:30.891195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.971112Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:31.036062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.188740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.259573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.221203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825384355864:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.221355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.221850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708825384355874:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.221942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.670349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.702271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.734032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.766296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.801373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.838253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.904680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.945251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.015530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708829679324041:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.015624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.015707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708829679324046:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.015770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708829679324048:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.015842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.019608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:34.029881Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708829679324050:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:34.111941Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708829679324101:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:34.961332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708808204484826:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.961402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:35.628599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... ing to start YDB, gRPC: 9274, MsgBus: 27879 2026-01-10T12:28:44.282056Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708870785719641:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:44.282105Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:44.295460Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:44.389943Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:44.410352Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:44.410440Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:44.414781Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:44.458203Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:44.459431Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:44.459443Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:44.459448Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:44.459518Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:44.925697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:44.933241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:44.946437Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.016357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.181423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.289239Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:45.291035Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.040053Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887965590656:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.040155Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.040436Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887965590665:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.040485Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.105433Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.139479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.169950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.200404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.240737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.273267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.308602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.361920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.444096Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887965591539:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.444203Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.444530Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887965591544:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.444583Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887965591545:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.444621Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.448396Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:48.461215Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708887965591548:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:48.519099Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708887965591599:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:49.284192Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708870785719641:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:49.284274Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:50.186683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpReattach::ReattachDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 20290, MsgBus: 11628 ... waiting for SysViewsRoster update finished 2026-01-10T12:28:33.307069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:33.426982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:33.439478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:33.440016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:33.440169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:33.793608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:33.793762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:33.799480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048110530411 != 1768048110530414 2026-01-10T12:28:33.809987Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:33.854527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:33.987609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:34.316150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:34.316220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:34.316257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:34.316835Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:34.329228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:34.643703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:34.740114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.027858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.433365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.738076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.603172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.603450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.604335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.604410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:36.640335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.833814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.105635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.386582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.644434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.908067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.214534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.532483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.927981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.928115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.928802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.928905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.928993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.939245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.116915Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.171142Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=5; Trying to start YDB, gRPC: 17699, MsgBus: 25348 ... waiting for SysViewsRoster update finished 2026-0 ... State: Connecting -> Connected 2026-01-10T12:28:45.365471Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:45.737358Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:45.737424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:45.737460Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:45.737962Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:45.750697Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:46.155990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:46.224120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.476527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.909335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.204354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.823245Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1907:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.823401Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.824731Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1980:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.825020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:47.857528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.061693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.357318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.605283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.854708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.133944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.400538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.706206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.087932Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2791:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.088091Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.088604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.088679Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.088752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.094606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:50.252592Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2800:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:50.311901Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2860:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:52.241558Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3116:4405], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3094:4405]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-10T12:28:52.252173Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3116:4405], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3094:4405]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-10T12:28:52.252947Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3110:4405], SessionActorId: [2:3094:4405], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:3094:4405]. 2026-01-10T12:28:52.253309Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YzBkOWUwMGEtNTI1Y2E3OTYtNTY1NDAyZDEtMjU4MTg4YTk=, ActorId: [2:3094:4405], ActorState: ExecuteState, LegacyTraceId: 01keky3006ckvgd2ssw2dqf2ty, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:3111:4405] from: [2:3110:4405] trace_id# 2026-01-10T12:28:52.253549Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [2:3111:4405] TxId: 281474976715673. Ctx: { TraceId: 01keky3006ckvgd2ssw2dqf2ty, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzBkOWUwMGEtNTI1Y2E3OTYtNTY1NDAyZDEtMjU4MTg4YTk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-10T12:28:52.254007Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YzBkOWUwMGEtNTI1Y2E3OTYtNTY1NDAyZDEtMjU4MTg4YTk=, ActorId: [2:3094:4405], ActorState: ExecuteState, LegacyTraceId: 01keky3006ckvgd2ssw2dqf2ty, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10369, MsgBus: 16099 2026-01-10T12:28:30.151395Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708814071882506:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:30.151482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:30.416461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:30.493344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:30.493442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:30.514201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:30.526644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:30.589400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:30.589429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:30.589436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:30.589532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:30.623338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:30.998589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:31.076427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.162113Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:31.210948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.365175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.430918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.582315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826956786220:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.582433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.586155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708826956786230:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.586283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:33.917180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.945950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.980632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.008887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.036871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.092732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.124145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.170112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.238962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708831251754401:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.239028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.239249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708831251754406:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.239250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708831251754407:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.239285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:34.242907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:34.252725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708831251754410:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:34.326278Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708831251754461:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:35.151792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708814071882506:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:35.151866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:36.234843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.103087Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593708844136656777:2554], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceI ... node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:45.218640Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:45.227337Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:45.708596Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:45.714424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:45.724500Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.786466Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:45.927784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.019002Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.022991Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:48.574686Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887926933004:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.574789Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.575046Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708887926933013:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.575104Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:48.652877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.686382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.721001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.757863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.788907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.831615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.867944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.947520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.041733Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708892221901186:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.041861Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.042169Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708892221901191:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.042235Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708892221901192:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.042308Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.046171Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:49.057229Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708892221901195:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:49.138619Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708892221901246:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:50.019154Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708875042029284:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:50.019245Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:51.202089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.153751Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593708905106803567:2555], TxId: 281474976715678, task: 1. Ctx: { CheckpointId : . TraceId : 01keky2zgb6vqykzgpnmcrtz3m. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YmM4Y2Q5MDYtN2FlODBhNjctZGExMWViODEtMWI3NTVmNjM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:28:52.153989Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593708905106803568:2556], TxId: 281474976715678, task: 2. Ctx: { CheckpointId : . TraceId : 01keky2zgb6vqykzgpnmcrtz3m. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YmM4Y2Q5MDYtN2FlODBhNjctZGExMWViODEtMWI3NTVmNjM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7593708905106803564:2523], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:28:52.154362Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YmM4Y2Q5MDYtN2FlODBhNjctZGExMWViODEtMWI3NTVmNjM=, ActorId: [3:7593708900811836118:2523], ActorState: ExecuteState, LegacyTraceId: 01keky2zgb6vqykzgpnmcrtz3m, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpImmediateEffects::ImmediateUpdate |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7191, MsgBus: 18400 2026-01-10T12:28:27.669507Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708799995965061:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.669687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.932179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.940041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.940979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:28.008907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.078670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.084221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708799995965008:2081] 1768048107666297 != 1768048107666300 2026-01-10T12:28:28.176551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.277047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.277054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.277140Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.683293Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.953328Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.035483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.188430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.371813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.466069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.973144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708812880868773:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.973259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.973590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708812880868783:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.973635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.454537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.484182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.512815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.547514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.585310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.619987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.667457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.748707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.835360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817175836951:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.835455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.835512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817175836956:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.836132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817175836959:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.836194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.839251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.852253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708817175836958:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.939068Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708817175837011:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.670854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708799995965061:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.670924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.382549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... B, gRPC: 27694, MsgBus: 16702 2026-01-10T12:28:46.071780Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708882437000094:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:46.076218Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:46.092252Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:46.164365Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:46.165682Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:46.165778Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:46.166652Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708882437000066:2081] 1768048126070127 != 1768048126070130 2026-01-10T12:28:46.201091Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:46.250839Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:46.250871Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:46.250888Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:46.250982Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:46.259397Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:46.771109Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:46.791079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:46.872043Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.036888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.082771Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:47.100528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.768758Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708895321903822:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.768840Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.769196Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708895321903831:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.769245Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.854958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.890088Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.920036Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.953949Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.990725Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.029463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.069347Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.128951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.219970Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708899616871999:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.220062Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.220525Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708899616872004:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.220537Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708899616872005:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.220620Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.223766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:50.235951Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708899616872008:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:50.313562Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708899616872059:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:51.077404Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708882437000094:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:51.077481Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:52.143067Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::CheckId [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> Cache::Test4 [GOOD] >> Cache::Test5 >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::ReplaceDuplicates |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> Cache::Test5 [GOOD] >> Cache::Test6 >> TTicketParserTest::BulkAuthorizationRetryError >> TTicketParserTest::LoginRefreshGroupsWithError >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::LoginGood >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> Cache::Test6 [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest 
|95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test6 [GOOD] >> ObjectStorageListingTest::ListingNoFilter |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink |95.8%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpFail::OnCommit [GOOD] >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12110, MsgBus: 11750 2026-01-10T12:28:27.629926Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708800575573535:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.630012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.897838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:27.916888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:27.916995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:27.964577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.029046Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708800575573497:2081] 1768048107625647 != 1768048107625650 2026-01-10T12:28:28.055623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.126312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.276000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.276028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.276039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.276117Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.639749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.887311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:28.928784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:28.975878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.139799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.315302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.404496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.129390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817755444559:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.129524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.129880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817755444569:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.129964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.479584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.508014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.536025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.564526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.595768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.639080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.688726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.742357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.810808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817755445447:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.810879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.811199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817755445453:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.811230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.811259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817755445452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.814713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.826595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708817755445456:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:31.917703Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708817755445507:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.630181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708800575573535:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.630225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:33.802670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... qpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:47.534514Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:47.535011Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:47.547250Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:47.547338Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:47.576619Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:47.630973Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:47.630997Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:47.631005Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:47.631089Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:47.729013Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:48.210088Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:48.217568Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:48.232373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.323282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.433203Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:48.472875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.552874Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.368866Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708903270814286:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.368954Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.369335Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708903270814295:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.369387Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.505782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.550549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.587247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.628637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.690791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.730500Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.769972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.821519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.927728Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708903270815166:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.927810Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.928053Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708903270815171:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.928110Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708903270815172:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.928156Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.931759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:51.944511Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708903270815175:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:52.038565Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708907565782522:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:52.416673Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708886090943286:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:52.416746Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:53.782060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:53.852988Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpFail::OnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 31254, MsgBus: 12845 2026-01-10T12:28:27.721284Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708800401469353:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:27.721368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:27.773463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:28.083660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:28.119804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:28.119921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-10T12:28:28.167914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:28.202137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:28.268328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:28.293609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:28.293663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:28.293677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:28.293781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:28.749310Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:28.939088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:29.007017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.175778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.335412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:29.405938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.136123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817581340356:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.136235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.136570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817581340366:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.136615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.497579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.526866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.565401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.598787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.627060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.667119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.704790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.760933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:31.852454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817581341235:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817581341240:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708817581341242:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.852748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:31.856657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.867331Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708817581341244:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:31.974815Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708817581341295:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.722148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708800401469353:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:32.722225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65177, MsgBus: 21839 ... waiting for SysViewsRoster update finished 2026-01-10T12:28:38.567745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown ... D_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:50.941903Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:50.942123Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:51.204779Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:51.204933Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:51.273417Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1768048127578611 != 1768048127578615 2026-01-10T12:28:51.279638Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:51.329319Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:51.458414Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:51.797392Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:51.797475Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:51.797515Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:51.798203Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:51.810216Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:52.229595Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:52.315759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.556116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.897555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:53.189184Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:53.743870Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1905:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:53.744092Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:53.745077Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1978:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:53.745198Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:53.774319Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:53.978644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.268731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.508290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.747281Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.015668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.285993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.579203Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.986173Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2789:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.986315Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.986860Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.986981Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.987070Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2796:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.993963Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:56.164397Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2798:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:56.223418Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2858:4222] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:59.250010Z node 3 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-01-10T12:28:59.251090Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:3171:4429] TxId: 281474976710674. Ctx: { TraceId: 01keky360dbtfcqc7752a73yj6, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NjNjYTg4MGEtMzYzOWFmYWYtODQyNDVjMTAtNmUyYjI5NjE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-10T12:28:59.251526Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NjNjYTg4MGEtMzYzOWFmYWYtODQyNDVjMTAtNmUyYjI5NjE=, ActorId: [3:3116:4429], ActorState: ExecuteState, LegacyTraceId: 01keky360dbtfcqc7752a73yj6, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27895, MsgBus: 2520 2026-01-10T12:28:46.499782Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708880880274498:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:46.500287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:46.762650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:46.765926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:46.766105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:46.769091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:46.835412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:46.916899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:46.916936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:46.916952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:46.917030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:46.961656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:47.344334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:47.403561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.511864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:47.555160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.715127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.786676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.763750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708893765178210:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.763892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.764359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708893765178220:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.764443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.085374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.125387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.154499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.190999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.221328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.289753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.353356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.430336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.520326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898060146396:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.520407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.520654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898060146402:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.520702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898060146401:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.520748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.525114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:50.535757Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708898060146405:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:50.607779Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708898060146456:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:51.496915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708880880274498:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:51.496974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:52.374402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 64205, MsgBus: 7314 2026-01-10T12:28:53.775486Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708911719973346:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:53.775572Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:53.791014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:53.862777Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:53.893670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:53.893738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:53.903856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:53.945189Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:53.945215Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:53.945230Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:53.945311Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:54.036603Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:54.326939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:54.339324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.384721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.526065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.623070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.782376Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:56.735616Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708924604877057:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.735696Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.735919Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708924604877067:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.736008Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.807971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.836949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.867309Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.897553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.930748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.971125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.004496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.080952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.176099Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708928899845240:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.176205Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.176578Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708928899845245:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.176644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708928899845246:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.176929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.180855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:57.195240Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708928899845249:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:57.251226Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708928899845300:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:58.711471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.776022Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708911719973346:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:58.776117Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> TTicketParserTest::LoginBad >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4401, MsgBus: 17702 2026-01-10T12:28:36.767667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708836215134974:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:36.767729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:36.992061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:37.017182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:37.017315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-10T12:28:37.092530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:37.098845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:37.233580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:37.233605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:37.233614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:37.233726Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:37.274871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:37.635602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:37.699255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.786644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:37.874869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.027385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.104867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.989413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708849100038696:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.989576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.990093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708849100038706:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.990164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.378399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.409031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.440003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.469341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.501941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.547722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.593518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.641231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:40.720169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708853395006877:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.720281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.724354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708853395006883:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.724386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708853395006882:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.724417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:40.727623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:40.737915Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708853395006886:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:40.804758Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708853395006939:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:41.767785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708836215134974:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:41.767870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:42.527525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:42.594268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... tion=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:54.109079Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:54.191875Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:54.235731Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:54.235828Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:54.238744Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:54.280390Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:54.280408Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:54.280415Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:54.280489Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:54.363480Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:54.786093Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:28:54.791042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:54.802418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.850523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.984954Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.052237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.182023Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:57.526081Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708928358862170:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.526184Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.526485Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708928358862180:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.526536Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.597895Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.629029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.657798Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.685231Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.713143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.743500Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.775333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.820928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:57.887959Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708928358863048:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.888039Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.888406Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708928358863053:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.888477Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708928358863054:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.888523Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:57.891903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:57.901216Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708928358863057:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:57.976546Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708928358863108:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:59.096444Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708915473958457:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:59.096524Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:59.881078Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:59.953287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] >> KqpErrors::ResolveTableError >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> KqpErrors::ProposeError >> KqpErrors::ProposeResultLost_RwTx+UseSink >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> ObjectStorageListingTest::ListingNoFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:29:02.179819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:02.344563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:02.368504Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:02.369271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:02.369406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:02.864969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:02.865114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:02.871870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048139299615 != 1768048139299618 2026-01-10T12:29:02.889972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:02.939254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:03.045357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:03.367673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:03.382753Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:03.499315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.557499Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:29:03.557852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:29:03.615660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:29:03.615833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:29:03.618705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:29:03.618827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:29:03.618921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:29:03.620833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:29:03.621041Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:29:03.621179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:29:03.632116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:29:03.662501Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:29:03.663827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:29:03.664053Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:29:03.664233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:29:03.664279Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:29:03.664321Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:03.666856Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:29:03.666986Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:29:03.667119Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:03.667184Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:29:03.667301Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:29:03.667354Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:03.667428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:29:03.667602Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:29:03.668109Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:29:03.671031Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:29:03.673801Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:03.684668Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:29:03.684823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:29:03.803954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:29:03.809695Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:29:03.809798Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:03.811170Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:03.811230Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:29:03.811312Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:29:03.811593Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:29:03.811775Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:29:03.811967Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:03.812135Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:29:03.817713Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:29:03.819307Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:29:03.821150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:29:03.821205Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:03.822094Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:29:03.822171Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:03.823552Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:03.823601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:29:03.823649Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:29:03.823716Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:29:03.823770Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:29:03.823865Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:03.828481Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:03.830223Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:29:03.830437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:29:03.830500Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:29:03.870164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.870296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.870392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.874778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.875074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.882821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:03.889990Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:04.007658Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:04.011194Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:29:04.089387Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:05.012626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2873], serverId# [1:1054:2874], sessionId# [0:0:0] 2026-01-10T12:29:05.015272Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:29:05.015555Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:29:05.026584Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:05.039686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-10T12:29:05.039903Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:29:05.040091Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2026-01-10T12:29:05.040314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8629, MsgBus: 11899 2026-01-10T12:28:51.314188Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708903846372104:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:51.314330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:51.547658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:51.588413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:51.588552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:51.658738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:51.658837Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7593708903846372002:2081] 1768048131308165 != 1768048131308168 2026-01-10T12:28:51.669865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:51.780880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:51.780905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:51.780914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:51.780988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:51.837111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:52.187670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:52.253708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.322704Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:52.380317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.544979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.614795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.533173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708916731275759:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:54.533308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:54.533757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708916731275769:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:54.533881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:54.815312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.846012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.872585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.900198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.929398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.965816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.995782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.068437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.138868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708921026243938:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.138953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.139274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708921026243943:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.139283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708921026243944:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.139330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.142302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:55.152227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708921026243947:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:55.259211Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708921026243998:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:56.312341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708903846372104:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:56.312426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:56.896385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... ng to start YDB, gRPC: 64658, MsgBus: 17460 2026-01-10T12:28:58.251526Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708930780726266:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:58.251701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:58.271358Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:58.347620Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:58.375853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:58.375936Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:58.385858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:58.432538Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:58.437328Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:58.437354Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:58.437362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:58.437445Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.836478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.842284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:58.855243Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.903529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:59.069385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:59.127870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:59.266449Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:01.694323Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708943665629970:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:01.694397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:01.694640Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708943665629979:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:01.694680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:01.769939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.802998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.832023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.866761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.897378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.931294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.967633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.023319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.100771Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708947960598142:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:02.100870Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:02.100924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708947960598147:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:02.101049Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7593708947960598149:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:02.101095Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:02.105277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:02.116918Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7593708947960598150:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:29:02.199232Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708947960598202:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:03.251975Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7593708930780726266:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:03.252050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:03.604614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4594, MsgBus: 13365 2026-01-10T12:28:34.697466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708829605998871:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:34.698574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:34.973272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:34.987452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:34.987619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:35.025003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:35.107861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:35.176986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:35.188140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:35.188160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:35.188169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:35.188242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:35.668523Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:35.703762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:35.726485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.903102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.103742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:36.195224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.298102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708846785869805:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.298226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.298742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708846785869815:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.298849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:38.746169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.784010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.818419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.876274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.906162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:38.944994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.005671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.060787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:39.154843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708851080837982:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.154928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.155025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708851080837987:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.155040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708851080837989:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.155078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:39.158672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:39.168380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708851080837991:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:39.260333Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708851080838044:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:39.690142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708829605998871:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:39.690208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:41.033425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.083725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... teTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 1927, MsgBus: 10775 2026-01-10T12:28:51.338624Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708903412323542:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:51.338706Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:51.359511Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:51.446568Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:51.460588Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:51.460666Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:51.486027Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:51.532170Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:51.543403Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:51.543429Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:51.543438Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-10T12:28:51.543536Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:51.985451Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:52.000352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.054042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.250760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.315477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.437950Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:55.103937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708920592194536:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.104057Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.104327Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708920592194545:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.104392Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.167081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.195823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.225022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.254147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.286262Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.324577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.365154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.411255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.498260Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708920592195421:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.498367Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.498693Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708920592195426:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.498731Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708920592195427:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.498781Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.502458Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:55.513414Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708920592195430:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:55.578314Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708920592195481:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:56.338815Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708903412323542:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:56.338889Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:57.235417Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::Interactive >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:29:04.064449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:04.162330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:04.171513Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:04.171953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:04.172036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:04.489632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:04.489770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:04.495761Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048141260596 != 1768048141260599 2026-01-10T12:29:04.507722Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:04.554762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:04.649241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:04.953019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:04.966773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.070791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.107420Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:29:05.107732Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:29:05.146894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:29:05.147053Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:29:05.149093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:29:05.149195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:29:05.149275Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:29:05.149706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:29:05.149874Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:29:05.149991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:29:05.161384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:29:05.190723Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:29:05.191000Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:29:05.191131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:29:05.191172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:29:05.191205Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:29:05.191242Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:05.191743Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:29:05.191840Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:29:05.191966Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:05.192036Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:29:05.192136Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:29:05.192194Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:05.192277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:29:05.192456Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:29:05.192795Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:29:05.192955Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:29:05.195358Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:05.206234Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-10T12:29:05.206370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-10T12:29:05.325568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-10T12:29:05.338721Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-10T12:29:05.338826Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:05.340457Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:05.340522Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:29:05.340572Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-10T12:29:05.340852Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-10T12:29:05.341051Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:29:05.341261Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:29:05.341338Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-10T12:29:05.343508Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-10T12:29:05.343981Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:29:05.345608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-10T12:29:05.345646Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:05.346487Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-10T12:29:05.346873Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:05.348487Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:29:05.348534Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:29:05.348583Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-10T12:29:05.348643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-10T12:29:05.348695Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-10T12:29:05.348791Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:05.361016Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:05.363514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-10T12:29:05.363720Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-10T12:29:05.363791Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-10T12:29:05.374717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.374804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.374867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.375597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.375766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.378999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:05.384921Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:05.504480Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-10T12:29:05.507800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:29:05.592907Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:06.073821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2873], serverId# [1:1054:2874], sessionId# [0:0:0] 2026-01-10T12:29:06.074497Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-10T12:29:06.074837Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-10T12:29:06.086152Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:29:06.092299Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-10T12:29:06.092625Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:29:06.092901Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2026-01-10T12:29:06.093157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2880], serverId# [1:1062:2881], sessionId# [0:0:0] 2026-01-10T12:29:06.095706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1067:2886], serverId# [1:1068:2887], sessionId# [0:0:0] 2026-01-10T12:29:06.095972Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-10T12:29:06.096258Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2026-01-10T12:29:06.096519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1067:2886], serverId# [1:1068:2887], sessionId# [0:0:0] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink |95.8%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 14151, MsgBus: 12957 2026-01-10T12:28:46.196047Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708880070476829:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:46.196246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:46.247306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:46.503859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:46.503962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:46.533496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:46.550607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:46.552620Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708880070476790:2081] 1768048126194606 != 1768048126194609 2026-01-10T12:28:46.556023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:46.656735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:46.656763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:46.656773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:46.656903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:46.805368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:47.080869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:47.140807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.214765Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:47.268372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.416360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:47.480746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.279827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708892955380559:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.279967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.280342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708892955380569:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.280405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:49.658673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.689558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.730492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.763741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.793281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.830423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.868127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.918582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.002728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708897250348734:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.002808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.003002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708897250348740:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.003042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708897250348739:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.003054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.007277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:50.022451Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708897250348743:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:50.098309Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708897250348794:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:51.197783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708880070476829:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:51.197863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:51.769168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... d: 72057594046644480 2026-01-10T12:29:00.820588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.880821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.038141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.083927Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:01.105419Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.276551Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708952301811767:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.276646Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.276901Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708952301811776:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.276967Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.361209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.388537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.413343Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.440308Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.469552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.504250Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.540600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.584737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.664461Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708952301812646:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.664554Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708952301812651:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.664553Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.664746Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708952301812653:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.664780Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.668678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:03.681200Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708952301812654:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:03.767294Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708952301812706:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:05.069351Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708939416908055:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:05.069437Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:05.651064Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:06.216925Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710677; 2026-01-10T12:29:06.231862Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7593708965186715162:2559], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7593708960891747801:2559]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7593708965186715162:2559].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-10T12:29:06.232540Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7593708965186715156:2559], SessionActorId: [3:7593708960891747801:2559], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7593708960891747801:2559]. 2026-01-10T12:29:06.232868Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YzEyOTY4NTQtYmQyYzc0YzUtM2EwMTJhZDktYTQyNjliODc=, ActorId: [3:7593708960891747801:2559], ActorState: ExecuteState, LegacyTraceId: 01keky3ds3bgxq7a22tckkg5za, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7593708965186715157:2559] from: [3:7593708965186715156:2559] trace_id# 2026-01-10T12:29:06.233015Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:7593708965186715157:2559] TxId: 281474976710677. Ctx: { TraceId: 01keky3ds3bgxq7a22tckkg5za, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YzEyOTY4NTQtYmQyYzc0YzUtM2EwMTJhZDktYTQyNjliODc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:29:06.233447Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzEyOTY4NTQtYmQyYzc0YzUtM2EwMTJhZDktYTQyNjliODc=, ActorId: [3:7593708960891747801:2559], ActorState: ExecuteState, LegacyTraceId: 01keky3ds3bgxq7a22tckkg5za, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::WriteOverload-InStore >> Normalizers::CleanEmptyPortionsNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::WriteOverload+InStore >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2068, MsgBus: 3337 2026-01-10T12:28:47.490611Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708885887274900:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.492319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:47.731870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:47.763721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:47.763901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:47.821585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:47.824702Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:47.825882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708885887274756:2081] 1768048127475916 != 1768048127475919 2026-01-10T12:28:47.919246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:47.919277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:47.919285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:47.919370Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:28:48.031427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:48.356779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:48.365802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:48.440940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.496895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:48.591391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.748496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.829019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.761579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898772178520:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.761739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.762203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898772178530:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.762342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.099076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.136212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.170006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.201805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.244949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.282309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.319674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.386358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.470987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903067146701:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.471072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.471080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903067146706:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.471293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903067146708:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.471339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.475077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:51.492221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708903067146709:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:51.573053Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708903067146763:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:52.483846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708885887274900:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:52.483927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:53.037619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ng to start YDB, gRPC: 17507, MsgBus: 11236 2026-01-10T12:29:01.042863Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708943931200923:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:01.044824Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:01.055920Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:01.145349Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:01.162417Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:01.162509Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:01.188925Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:01.240806Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:01.240827Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:01.240833Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:01.240899Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:01.327777Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:01.629411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:01.636955Z node 
3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:29:01.643354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.701967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.856160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:01.920790Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.058643Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:04.469201Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708956816104632:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.469313Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.469632Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708956816104641:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.469697Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.532447Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.584136Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.614030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.642843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.674568Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.710602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.745589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.792174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.883674Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708956816105511:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.883786Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.884198Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708956816105517:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.884204Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708956816105516:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.884261Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:04.889568Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:04.906428Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708956816105520:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:29:04.989195Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708956816105571:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:06.042632Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708943931200923:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:06.042721Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:06.904359Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 17117, MsgBus: 18363 2026-01-10T12:28:47.384859Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708885980740628:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:47.385016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:47.607309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:47.612269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:47.612422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:47.644048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:47.693804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:47.711787Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708885980740499:2081] 1768048127372920 != 1768048127372923 2026-01-10T12:28:47.832717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:47.832749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:47.832793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:47.832907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:47.833912Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:48.255351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:48.264833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:48.323478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.414161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:48.502271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.654900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:48.727103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.792936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898865644273:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.793125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.793506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708898865644283:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:50.793555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.097511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.141722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.178008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.213441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.250864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.325506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.362678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.424037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.515592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903160612451:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.515690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.516070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903160612457:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.516100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708903160612456:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.516143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.519986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:51.531984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708903160612460:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:51.637275Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708903160612511:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:52.382734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708885980740628:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:52.382800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:53.242155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... DB, gRPC: 17241, MsgBus: 9298 2026-01-10T12:29:01.590660Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708945306618500:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:01.590857Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:01.604229Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:01.688715Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:01.690132Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708945306618454:2081] 1768048141588312 != 1768048141588315 2026-01-10T12:29:01.723497Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:01.723598Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:01.725355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:01.778312Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:01.778333Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:01.778342Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:01.778428Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:01.893228Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:02.278323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:02.286418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.339889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.545236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.598570Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:02.607623Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.194294Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708962486489513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.194403Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.194787Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708962486489523:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.194880Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.258920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.290917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.321925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.354788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.386366Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.423538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.457762Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.511758Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.594979Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708962486490391:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.595083Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708962486490396:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.595101Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.595305Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708962486490399:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.595348Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.600654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:05.615038Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708962486490398:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:05.704363Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708962486490453:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:06.590235Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708945306618500:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:06.591046Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:07.478628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28729, MsgBus: 20990 2026-01-10T12:28:48.531488Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708889747011691:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:48.531727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:48.751618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:48.781542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:48.781645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:48.816109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:48.862317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:48.975694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:48.999720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:48.999754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:48.999763Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:48.999851Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:49.422473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:49.474401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.541037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:49.605980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.770964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.842925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:51.830319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708902631915316:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.830481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.830875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708902631915326:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:51.830942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.284778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.317864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.351619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.380381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.409698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.442347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.494342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.540688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.615134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708906926883493:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.615223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708906926883498:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.615280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.615505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708906926883500:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.615543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.618875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:52.629236Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708906926883501:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:52.688514Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708906926883553:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:53.529914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708889747011691:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:53.530009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:54.038683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 61539, MsgBus: 6264 2026-01-10T12:28:55.414103Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... or outdated, will use file: (empty maybe) 2026-01-10T12:29:02.686699Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:02.686707Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:02.686811Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:02.796778Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:03.100375Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:03.108635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.156266Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.287233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.349835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.529709Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.548795Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961146105989:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.548908Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.549230Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961146105998:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.549314Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.651511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.684562Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.715698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.747284Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.780877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.816826Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.856500Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.905897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.986306Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961146106869:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.986408Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.986434Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961146106874:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.986560Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961146106876:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.986605Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.990078Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:05.999989Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708961146106877:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:06.095387Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708965441074225:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:07.518776Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708948261202275:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:07.518863Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:07.739046Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.348884Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593708974031009223:2548], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01keky3fdpdmsrmt6qe8f6aqz5. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzRkYWQxMDItMTJiNWUxYjEtNWY3YTQ2YjgtODA0NDA0OTA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-10T12:29:08.349161Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593708974031009224:2549], TxId: 281474976710677, task: 2. Ctx: { CheckpointId : . TraceId : 01keky3fdpdmsrmt6qe8f6aqz5. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzRkYWQxMDItMTJiNWUxYjEtNWY3YTQ2YjgtODA0NDA0OTA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7593708974031009220:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:29:08.349525Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzRkYWQxMDItMTJiNWUxYjEtNWY3YTQ2YjgtODA0NDA0OTA=, ActorId: [3:7593708969736041784:2521], ActorState: ExecuteState, LegacyTraceId: 01keky3fdpdmsrmt6qe8f6aqz5, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6176, MsgBus: 2798 2026-01-10T12:28:40.752977Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708854799607086:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:40.753024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:40.954940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:40.977534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:40.977647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:41.045211Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:41.045935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708854799607056:2081] 1768048120751487 != 1768048120751490 2026-01-10T12:28:41.061883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:41.145540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:41.145598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:41.145615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:41.145736Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:41.152744Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:41.528584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:41.582645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.701018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.780857Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:41.856411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:41.927858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:43.987876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708867684510831:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:43.987986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:43.988332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708867684510841:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:43.988380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.372271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.410382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.441814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.476806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.512678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.561425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.597025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.644859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:44.714691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708871979479003:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.714757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.714803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708871979479008:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.715993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708871979479010:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.716103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:44.718168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:44.731105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708871979479011:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:44.808427Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708871979479063:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:45.753428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708854799607086:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:45.753495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:46.619554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... elf_id=[3:7593708938704376283:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:59.680501Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:59.694538Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:59.768029Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:59.814172Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:59.814269Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:59.817664Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:59.874951Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:59.878336Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:59.878360Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:59.878372Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:59.878458Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:00.349104Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:00.367174Z 
node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.431511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.656390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.704882Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:00.735522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.250420Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708955884247303:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.250513Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.250737Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708955884247312:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.250785Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.324548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.354451Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.382209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.413170Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.445201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.481015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.554901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.606643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.673056Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708955884248184:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.673134Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708955884248189:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.673186Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.673468Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708955884248192:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.673545Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:03.676410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:03.691156Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708955884248191:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:29:03.790666Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708955884248246:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:04.680600Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708938704376283:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:04.680696Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:05.435174Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.511820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> TColumnShardTestReadWrite::RebootWriteRead >> Normalizers::SchemaVersionsNormalizer >> KqpExplain::CompoundKeyRange [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> TColumnShardTestReadWrite::Write [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> KqpImmediateEffects::Interactive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21769, MsgBus: 2909 2026-01-10T12:28:48.802747Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708890009421097:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:48.803118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:49.036478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:49.043316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-10T12:28:49.043464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:49.046578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:49.125221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:49.133237Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708890009420923:2081] 1768048128794497 != 1768048128794500 2026-01-10T12:28:49.220719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:49.220751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:49.220782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:49.220884Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:49.249096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:49.603773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:49.611655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:28:49.660764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.803963Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:49.804612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:49.959756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:50.025716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.109060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708907189291980:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.109185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.109795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708907189291990:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.109846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.463266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.497884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.533713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.567967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.599305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.642416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.669247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.708210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:52.778916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708907189292859:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.778976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.779175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708907189292864:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.779189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708907189292865:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.779211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:52.782713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:52.793047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708907189292868:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:28:52.863239Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708907189292921:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:53.801677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708890009421097:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:53.801742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:54.355229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... :29:02.317832Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708948187002642:2081] 1768048142232802 != 1768048142232805 2026-01-10T12:29:02.325742Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:02.362159Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:02.362255Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:02.364551Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:02.407474Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:02.407505Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:02.407513Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:02.407601Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:02.507255Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:02.824803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:02.833035Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:02.884490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.070290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.130411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:03.256569Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.605247Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961071906397:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.605374Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.605650Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708961071906407:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.605705Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.674579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.703234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.732065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.763297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.837503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.882138Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.924172Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.973175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:06.049939Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708965366874575:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.050064Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.050207Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708965366874580:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.050365Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708965366874582:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.050423Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.054142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:06.065402Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708965366874584:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:06.168661Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708965366874635:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:07.234034Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708948187002681:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:07.234152Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:07.749460Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:07.789099Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:07.837051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: 2026-01-10T12:26:26.637395Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708277971461395:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:26.637460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:26.906891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:27.052670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:27.052832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:27.089876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:27.188860Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:27.188882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:27.194526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708277971461353:2082] 1768047986635364 != 1768047986635367 2026-01-10T12:26:27.392774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:27.392817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:27.392833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:27.392932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:27.481253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:27.645207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:27.803824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:26:27.836310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:27.836386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:27.838995Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:26:27.846697Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708283625768552:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:27.846802Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:27.852839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:27.893862Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:26:27.913017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:26:28.009968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:28.010088Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:28.010892Z node 2 :HIVE WARN: 
tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011114Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011180Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011279Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011411Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011478Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011532Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011607Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.011692Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:26:28.017269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:28.095777Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:26:28.095844Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:26:28.199517Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:26:28.199580Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:26:28.199759Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:26:28.199790Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:26:28.199829Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:26:28.199874Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:26:28.199902Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:26:28.199930Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:26:28.212729Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7593708287920736378:2299] 2026-01-10T12:26:28.212768Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:26:28.212841Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:26:28.218479Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:28.226432Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:26:28.242493Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. 
Describe result: PathErrorUnknown 2026-01-10T12:26:28.242541Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. Creating table 2026-01-10T12:26:28.242646Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:26:28.248559Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7593708287920736527:2390], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:26:28.248998Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:26:28.249066Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:7593708287920736529:2319], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:26:28.261848Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7593708287920736488:2375] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-10T12:26:28.284486Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-10T12:26:28.293575Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7593708287920736573:2428] 2026-01-10T12:26:28.296199Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7593708287920736573:2428], schemeshard id = 72075186224037897 2026-01-10T12:26:28.368002Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:26:28.379943Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7593708287920736732:2576], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:26:28.394329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:28.399460Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593708287920736421:2297]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:26:28.399520Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:7593708287920736440:2299] Owner: [2:7593 ... 
25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:04.210762Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:04.327253Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:04.327287Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:04.327299Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:04.327421Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:04.352421Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:04.917865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:04.941326Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.015671Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.120894Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.199713Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.369329Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.817844Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593708975711231028:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:08.817952Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:08.818260Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593708975711231037:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:08.818317Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:08.921158Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.971631Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.010923Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.048238Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.070500Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7593708958531359999:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:09.070603Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:09.096596Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.147936Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.198648Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.258005Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.352510Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593708980006199212:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:09.352609Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:09.352675Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593708980006199217:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:09.352804Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7593708980006199219:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:09.352889Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:09.357588Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:09.371358Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7593708980006199221:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:09.465388Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7593708980006199272:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpErrors::ResolveTableError [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> Normalizers::SchemaVersionsNormalizer [GOOD] >> Normalizers::RemoveWriteIdNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 64595, MsgBus: 16805 2026-01-10T12:28:53.180612Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708909818546204:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:53.180958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:53.389346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:53.389453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:53.418875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:53.460336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:53.460361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:53.568312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:53.568334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:53.568343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:53.568441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:53.685921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:53.972310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:53.978709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:28:54.023867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.147083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.245567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:54.283550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:54.352210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:55.923381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708918408482538:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.923543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.923930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708918408482548:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:55.923985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.254919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.281975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.307785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.334009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.361857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.393768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.454328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.491188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.563899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708922703450717:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.563987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.564134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708922703450722:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.564541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708922703450724:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.564637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:56.568108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:56.580024Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708922703450725:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:56.676403Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708922703450777:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:58.182391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708909818546204:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:58.183973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:28:58.254628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... teTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 23017, MsgBus: 9180 2026-01-10T12:29:07.455535Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708971179294424:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:07.455605Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:07.467699Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:07.544232Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:07.593778Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:07.593890Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:07.597681Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:07.640072Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:07.640110Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:07.640117Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:07.640211Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:07.694981Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-10T12:29:08.030492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:08.049714Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.107915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.351540Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.412600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.467085Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:10.544451Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708984064198145:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.544554Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.544886Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708984064198155:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.544966Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.602867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.630871Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.660295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.689324Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.716269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.779630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.814339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.856719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.928366Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708984064199026:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.928454Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.928588Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708984064199031:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.928806Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708984064199033:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.928861Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.932297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:10.943079Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708984064199035:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:11.044036Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708988359166382:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:12.456033Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708971179294424:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:12.456130Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:12.739792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2026-01-10T12:29:13.187096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:13.220195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:13.220480Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:13.228018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:13.228298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:13.228510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:13.228626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:13.228752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:13.228852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:13.228943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:13.229065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:13.229167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:13.229286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:13.229416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:13.229552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:13.229665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:13.260259Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:13.260622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:13.260707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:13.260892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:13.261038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:13.261112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:13.261165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:13.261276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:13.261346Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:13.261391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:13.261421Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:13.261619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:13.261682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:13.261729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:13.261768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:13.261879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:13.261930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:13.261971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:13.262012Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:13.262071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:13.262113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:13.262166Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:13.262223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:13.262271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:13.262298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:13.262521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:13.262632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:13.262663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:13.262802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:13.262847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:13.262877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:13.262942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:13.262997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:13.263026Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:13.263069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:13.263106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:13.263136Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:13.263263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:13.263333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:14.165944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=10;finished=1; 2026-01-10T12:29:14.165978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:14.166014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:14.166204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:14.166363Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.166411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:14.166534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-10T12:29:14.166582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=message; 2026-01-10T12:29:14.166794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2026-01-10T12:29:14.166946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.167057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.167166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.167281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:14.167415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.167549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.167767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:372:2384] finished for tablet 9437184 2026-01-10T12:29:14.168230Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:371:2383];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":1404749,"name":"_full_task","f":1404749,"d_finished":0,"c":0,"l":1412180,"d":7431},"events":[{"name":"bootstrap","f":1404930,"d_finished":956,"c":1,"l":1405886,"d":956},{"a":1411632,"name":"ack","f":1410544,"d_finished":1010,"c":1,"l":1411554,"d":1558},{"a":1411623,"name":"processing","f":1405976,"d_finished":2922,"c":3,"l":1411556,"d":3479},{"name":"ProduceResults","f":1405533,"d_finished":1873,"c":6,"l":1411943,"d":1873},{"a":1411948,"name":"Finish","f":1411948,"d_finished":0,"c":0,"l":1412180,"d":232},{"name":"task_result","f":1405988,"d_finished":1866,"c":2,"l":1410404,"d":1866}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.168305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:14.168703Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:371:2383];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":1404749,"name":"_full_task","f":1404749,"d_finished":0,"c":0,"l":1412709,"d":7960},"events":[{"name":"bootstrap","f":1404930,"d_finished":956,"c":1,"l":1405886,"d":956},{"a":1411632,"name":"ack","f":1410544,"d_finished":1010,"c":1,"l":1411554,"d":2087},{"a":1411623,"name":"processing","f":1405976,"d_finished":2922,"c":3,"l":1411556,"d":4008},{"name":"ProduceResults","f":1405533,"d_finished":1873,"c":6,"l":1411943,"d":1873},{"a":1411948,"name":"Finish","f":1411948,"d_finished":0,"c":0,"l":1412709,"d":761},{"name":"task_result","f":1405988,"d_finished":1866,"c":2,"l":1410404,"d":1866}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:14.168781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:14.158964Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:29:14.168825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:14.168949Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:29:10.807694Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.807939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.935380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:10.936288Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:11.353044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:11.449424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.449593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.450552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.450643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.496103Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:29:11.497190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.497582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.525366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:11.597770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:12.215510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:12.285136Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.285336Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.611784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:14.225884Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-10T12:29:14.226989Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-10T12:29:14.228507Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:831} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-10T12:29:14.228623Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:846} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-10T12:29:14.229563Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710658. Resolved key sets: 1 2026-01-10T12:29:14.229757Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-10T12:29:14.235859Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:29:14.242018Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1854:3211] 2026-01-10T12:29:14.242103Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1854:3211], channels: 0 2026-01-10T12:29:14.243847Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:29:14.243998Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2744} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-10T12:29:14.244100Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1854:3211] 2026-01-10T12:29:14.244161Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1854:3211], channels: 0 2026-01-10T12:29:14.244848Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1854:3211], trace_id# 2026-01-10T12:29:14.244933Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1854:3211], trace_id# 2026-01-10T12:29:14.245010Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [1:1851:3211] TxId: 281474976710658. 
Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:29:14.314704Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1854:3211] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:29:14.314831Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1854:3211], trace_id# 2026-01-10T12:29:14.314902Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1854:3211], trace_id# 2026-01-10T12:29:14.316354Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1854:3211] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 13329 Tasks { TaskId: 1 CpuTimeUs: 7421 FinishTimeMs: 1768048154315 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 1338 BuildCpuTimeUs: 6083 HostName: "ghrun-tjszgfszoq" NodeId: 1 CreateTimeMs: 1768048154248 UpdateTimeMs: 1768048154315 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:29:14.316459Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1854:3211] 2026-01-10T12:29:14.316542Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:1850:3211] trace_id# 2026-01-10T12:29:14.316627Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.013329s ReadRows: 0 ReadBytes: 0 RequestUnits# 8 ForceFlag# true trace_id# 2026-01-10T12:29:14.360122Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:29:14.360232Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [1:1851:3211] TxId: 281474976710658. Ctx: { TraceId: 01keky3mxd4ztnrkswfaj5z90p, Database: , SessionId: ydb://session/3?node_id=1&id=YzgzM2QzOTctOTcxNzc1MDAtNjQ5OWI4ZGYtNmU2NDcxZTM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-10T12:29:14.462169Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1871:3230], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2026-01-10T12:29:14.464997Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Yjc2YzliYmItYTNiYmIwMTktNWM5MjZmZWItYTNhZDgwYTA=, ActorId: [1:1869:3228], ActorState: ExecuteState, LegacyTraceId: 01keky3nqx1e7sdsjyvyapxyh2, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[/Root/table-1]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> TColumnShardTestReadWrite::WriteExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1152, MsgBus: 6115 2026-01-10T12:28:55.123627Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708921730200726:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:55.123733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:55.388348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:55.402175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:55.402253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:55.404404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:55.430144Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708921730200691:2081] 1768048135122244 != 1768048135122247 2026-01-10T12:28:55.433001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:55.497963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:55.498007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:55.498020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:55.498138Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:55.632225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:55.861110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:55.912846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.045799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.132890Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:56.172182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:56.240590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.240399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708934615104454:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.240581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.241054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708934615104464:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.241109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.535484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.563562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.592670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.621881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.650993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.684603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.747003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.787820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:58.857280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708934615105334:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.857370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.857458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708934615105339:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.857612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708934615105341:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.857798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:58.861480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:58.871345Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708934615105343:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:28:58.964601Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708934615105394:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:00.124207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708921730200726:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:00.124297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:00.568639Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-10T12:29:00.577526Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_ ... ode_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:08.400825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:08.460205Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:08.460229Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:08.460237Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:08.460329Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:08.477145Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:08.860625Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:08.879105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.939891Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.094489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.157435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:09.286141Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:11.806631Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708986517616080:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.806714Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.806925Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708986517616089:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.806951Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.891318Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.924639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.954884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.984724Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:12.017234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:12.049080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:12.080637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:12.123246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:12.195094Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708990812584255:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:12.195173Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708990812584260:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:12.195255Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:12.195486Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7593708990812584263:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:12.195530Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:12.199168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:12.211200Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593708990812584262:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:12.274009Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593708990812584315:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:13.254680Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708973632712357:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:13.254771Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:14.269166Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7593708999402519217:2535], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01keky3ndf1z20ck3cvskngrex. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjEyN2QzZWItNDczMTE4MDgtNDU3ZmJmMDItMWZmZmVhNzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-10T12:29:14.269382Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7593708999402519218:2536], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01keky3ndf1z20ck3cvskngrex. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjEyN2QzZWItNDczMTE4MDgtNDU3ZmJmMDItMWZmZmVhNzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7593708999402519214:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-10T12:29:14.269700Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NjEyN2QzZWItNDczMTE4MDgtNDU3ZmJmMDItMWZmZmVhNzU=, ActorId: [3:7593708999402519172:2521], ActorState: ExecuteState, LegacyTraceId: 01keky3ndf1z20ck3cvskngrex, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2026-01-10T12:29:09.343449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.378756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.378987Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.386341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.386583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.386907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.387043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.387145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.387241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.387355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.387494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.387604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.387708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.387833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.387971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.388101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.420037Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.420375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.420431Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.420614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.420790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.420874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.420920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.421013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.421089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.421129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.421155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.421336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.421407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.421451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.421489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.421593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.421647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.421691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.421726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.421782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.421854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.421901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.421945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.422000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.422030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.422295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.422411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.422446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.422584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.422632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.422668Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.422723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.422774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.422804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.422869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.422912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.422950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.423078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.423145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-10T12:29:14.057660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; >> KqpBatchDelete::ManyPartitions_1 [GOOD] >> KqpBatchDelete::SimplePartitions [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2026-01-10T12:28:57.113968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708927211871320:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:57.114090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:57.367077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:57.367192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:57.397920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:57.429325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:57.464574Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708927211871292:2081] 1768048137113284 != 1768048137113287 2026-01-10T12:28:57.467600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:57.687675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:57.690469Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:57.690503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:57.690511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:57.690613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.071338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.081737Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 94FB8939949EDA7C67CFF40FD2A9BC9E78ACF33F780EE22B47DE0AB491A984B6 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-10T12:28:58.125154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:00.993573Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708941811077062:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:00.993620Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:01.027602Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:01.109598Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:01.111549Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708941811077032:2081] 1768048140989041 != 1768048140989044 2026-01-10T12:29:01.128473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:01.128608Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:01.146167Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:01.205171Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:01.205196Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:01.205203Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:01.205285Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:01.280384Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:01.432759Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:01.438990Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 4D756F4B91BDC69AD3FF6AD58A191165C618FC971D231BC6F4940BF6C5B553F8 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2026-01-10T12:29:01.439740Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 4D756F4B91BDC69AD3FF6AD58A191165C618FC971D231BC6F4940BF6C5B553F8: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2026-01-10T12:29:04.716513Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708958379493915:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:04.716588Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:04.729453Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:04.797887Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708958379493888:2081] 1768048144715563 != 1768048144715566 2026-01-10T12:29:04.798464Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:04.813884Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:04.813975Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:04.836787Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:04.892886Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:04.892915Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:04.892923Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:04.893004Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:04.900831Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:05.128309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:29:05.137246Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 4D5FA0D17AF815D6E3EA1F3B0D87CF941175D18186EA997C6EE591EACB332DBB () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-10T12:29:05.137959Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 4D5FA0D17AF815D6E3EA1F3B0D87CF941175D18186EA997C6EE591EACB332DBB: Cannot create token from certificate. Client certificate failed verification 2026-01-10T12:29:08.281167Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708975772752963:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:08.309818Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:08.390767Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:08.392129Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:08.392397Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708975772752853:2081] 1768048148268333 != 1768048148268336 2026-01-10T12:29:08.434742Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:08.434855Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:08.444420Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:08.494923Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:08.494946Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:08.494954Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:08.495033Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:08.601304Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:08.773900Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:08.783947Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-10T12:29:08.784023Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb28a84f350] Connect to grpc://localhost:10805 2026-01-10T12:29:08.788751Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb28a84f350] Request 
AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-10T12:29:08.804950Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb28a84f350] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-10T12:29:08.805213Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-10T12:29:08.805397Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:08.806137Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-10T12:29:08.806452Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb28a84f350] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-10T12:29:08.809153Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cb28a84f350] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-10T12:29:08.809306Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-10T12:29:08.809379Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2026-01-10T12:29:12.143782Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593708994447294625:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:12.143846Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:12.157258Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:12.250095Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:12.251695Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593708994447294601:2081] 1768048152143314 != 1768048152143317 2026-01-10T12:29:12.265185Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:12.294792Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:12.294869Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:12.296558Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:12.333913Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:12.333943Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:12.333952Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:12.334066Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:12.492769Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:12.616637Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:12.622816Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-10T12:29:12.622864Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cb28a7e12d0] Connect to grpc://localhost:14663 2026-01-10T12:29:12.623899Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb28a7e12d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** 
(8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 14: "Service Unavailable" 2026-01-10T12:29:12.631457Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb28a7e12d0] Status 14 Service Unavailable 2026-01-10T12:29:12.631620Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:29:12.631641Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:29:12.631670Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:12.631751Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-10T12:29:12.632007Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cb28a7e12d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-01-10T12:29:12.634875Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cb28a7e12d0] Status 1 CANCELLED 2026-01-10T12:29:12.635007Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-10T12:29:12.635025Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-01-10T12:29:12.635041Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TColumnShardTestReadWrite::WriteReadModifications >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2026-01-10T12:29:09.343458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.382183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.382502Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.390967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.391294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.391547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.391693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.391815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.391926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.392058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.392216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.392337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.392487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.392637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.392779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.392898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.426113Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.426516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.426591Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.426789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.426985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.427081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.427158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.427285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.427367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.427420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.427453Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.427653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.427719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.427766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.427808Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.427913Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.427980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.428060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.428124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.428180Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.428224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.428255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.428310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.428372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.428406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.428635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.428746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.428782Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.428920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.428968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.428999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.429064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.429120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.429169Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.429226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.429278Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.429317Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.429460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.429533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:29:13.817130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:13.830079Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f7ce40a8-ee1f11f0-89d6b163-f7d482a0; 2026-01-10T12:29:13.830301Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-10T12:29:13.830342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-10T12:29:13.830383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-10T12:29:13.830761Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-10T12:29:13.830843Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-10T12:29:13.842369Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 
2026-01-10T12:29:13.842529Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:13.844196Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f82af9b0-ee1f11f0-b8f7aa45-856018f0; 2026-01-10T12:29:13.844374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-10T12:29:13.844411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-10T12:29:13.844456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-10T12:29:13.844798Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-10T12:29:13.844867Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-10T12:29:13.856510Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-10T12:29:13.856664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:13.858276Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f877b5fc-ee1f11f0-98a3c25b-d962c660; 2026-01-10T12:29:13.858489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-10T12:29:13.858537Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-10T12:29:13.858580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-10T12:29:13.858975Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-10T12:29:13.859050Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-10T12:29:13.871207Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-10T12:29:13.871427Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:13.890667Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-10T12:29:13.890763Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=f916d786-ee1f11f0-9efd247c-793b6826;in_flight=1;size_in_flight=6330728; 2026-01-10T12:29:14.818169Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-10T12:29:14.874289Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f916d786-ee1f11f0-9efd247c-793b6826; 2026-01-10T12:29:14.874592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-10T12:29:14.874664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-10T12:29:14.874762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-10T12:29:14.875398Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-10T12:29:14.875528Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-10T12:29:14.887956Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-10T12:29:14.888221Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] Test command err: 2026-01-10T12:29:09.343425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.367527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.368535Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.383976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.384212Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.384407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.384474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.384535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.384611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.384694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.384800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.384876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.384995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.385080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.385163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.385242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.408698Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.408971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.409023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.409201Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.410409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.410452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.410554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.410665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.410685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.410861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.410981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.411020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.411119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.411238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.411266Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.411313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2026-01-10T12:29:09.411348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.411376Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.411432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.411468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.411487Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.411641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.411721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.411744Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.411842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.411870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.411888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.411922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.411958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.411991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.412025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.412069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.412125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.412323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.412402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:29:14.427277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:14.445951Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f7e9d570-ee1f11f0-8711254d-13544607; 2026-01-10T12:29:14.446211Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-10T12:29:14.446262Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-10T12:29:14.446307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-10T12:29:14.446815Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-10T12:29:14.446922Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-10T12:29:14.458912Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-10T12:29:14.459151Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:14.461066Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f861b572-ee1f11f0-a0c6e92b-c1834783; 2026-01-10T12:29:14.461289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-10T12:29:14.461341Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-10T12:29:14.461387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-10T12:29:14.461903Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-10T12:29:14.462012Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-10T12:29:14.474233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-10T12:29:14.474432Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:14.476267Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f8d106a2-ee1f11f0-9b3cd11c-e104c8f5; 2026-01-10T12:29:14.476533Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-10T12:29:14.476601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-10T12:29:14.476663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-10T12:29:14.477225Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-10T12:29:14.477340Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-10T12:29:14.489457Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-10T12:29:14.489725Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:14.512800Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-10T12:29:14.512904Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=f975c5d4-ee1f11f0-99a92f63-a112d3e7;in_flight=1;size_in_flight=6330728; 2026-01-10T12:29:15.227979Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-10T12:29:15.282141Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=f975c5d4-ee1f11f0-99a92f63-a112d3e7; 2026-01-10T12:29:15.282504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-10T12:29:15.282582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-10T12:29:15.282659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-10T12:29:15.283388Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-10T12:29:15.283542Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-10T12:29:15.299594Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-10T12:29:15.299842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |95.9%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 10350, MsgBus: 27954 2026-01-10T12:25:13.164792Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707966433708691:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:13.164880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:13.424681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:13.428408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:13.428548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:13.434042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:13.534671Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707966433708652:2081] 1768047913163376 != 1768047913163379 2026-01-10T12:25:13.540201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:13.615606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:13.615627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:13.615633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:13.615734Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:13.714513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:14.090727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:14.143840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.195056Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:14.280875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.433774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.512247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.554578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707979318612414:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.554813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.555282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707979318612424:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.555355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:16.860637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.892937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.930506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.965539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:16.995613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.031590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.066259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.113931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:17.183673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707983613580587:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.183741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.183785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707983613580592:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.183957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707983613580594:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.183996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:17.187747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:17.197269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707983613580595:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:17.276024Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707983613580649:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:18.164774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707966433708691:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:18.164829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11724, MsgBus: 26263 2026-01-10T12:25:25.105731Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708019734516266:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:25.105881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_s ... 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 21802, MsgBus: 21329 2026-01-10T12:28:59.842969Z node 16 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7593708937546896821:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:59.843180Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:59.861638Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:59.955986Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:59.957363Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7593708937546896671:2081] 1768048139834253 != 1768048139834256 2026-01-10T12:28:59.979379Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:59.979493Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:00.012972Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:00.061232Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:00.061258Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:00.061269Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:00.061371Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:00.162827Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:00.527654Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:00.544614Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.611019Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.818255Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:00.869155Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:00.988358Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:04.842108Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7593708937546896821:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:04.842191Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:05.474828Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593708963316702320:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.474955Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.475345Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593708963316702329:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.475408Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:05.587043Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.626810Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.661328Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.696542Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.729506Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.767692Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.808441Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.936165Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:06.022866Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593708967611670519:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.022981Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593708967611670524:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.022981Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.023229Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593708967611670526:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.023290Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:06.027106Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:06.039150Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7593708967611670527:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:29:06.107786Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7593708967611670579:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:14.941813Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:29:14.941850Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 14007, MsgBus: 8934 2026-01-10T12:25:22.301251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708005283122337:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:22.301330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:22.636195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:22.642502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:22.642598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:22.693781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:22.761731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:22.766435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708005283122308:2081] 1768047922299632 != 1768047922299635 2026-01-10T12:25:22.827373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:22.842628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:22.842644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:22.842678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:22.842740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:23.287009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:23.293645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:23.321577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:23.340013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.463570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.623844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:23.694611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:25.639606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708018168026090:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.639745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.640251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708018168026100:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.640311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:25.992831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.032779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.073104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.108696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.153517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.183778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.218950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.281076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:26.351309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022462994266:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.351414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.351436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022462994271:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.351661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708022462994273:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.351710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:26.355106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:26.365678Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708022462994274:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:26.453828Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708022462994326:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:27.327410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708005283122337:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:27.327712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:28.054039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 1Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7593708955222013470:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:03.889895Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:03.911555Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:04.057018Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:04.060946Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7593708955222013442:2081] 1768048143888324 != 1768048143888327 2026-01-10T12:29:04.082791Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:04.082965Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:04.128975Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:04.170388Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:04.216541Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:04.216580Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:04.216593Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:04.216733Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:04.899062Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.417605Z node 20 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:05.444523Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.558806Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.860039Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:05.985674Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:08.890090Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7593708955222013470:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:08.890225Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:10.562352Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593708985286786406:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.562537Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.562968Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593708985286786416:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.563047Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:10.685523Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.740298Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.793131Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.845268Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.893903Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:10.951049Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.003868Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.140110Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:11.258818Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593708989581754608:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.258891Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593708989581754613:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.258962Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.259205Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593708989581754616:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.259264Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:11.264715Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:11.280471Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7593708989581754615:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:11.345857Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7593708989581754668:3788] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:14.168298Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17195, MsgBus: 18694 2026-01-10T12:24:55.942975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707890534498741:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:24:55.943134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:24:56.014033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:24:56.175639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:56.177465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:56.177562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:56.177905Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593707890534498705:2081] 1768047895941330 != 1768047895941333 2026-01-10T12:24:56.180444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:56.197130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:24:56.302943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:56.302969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:56.302975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:56.303071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:56.681216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 16 2026-01-10T12:24:56.952169Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:58.598449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:58.696701Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2026-01-10T12:24:58.719529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707903419401581:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.719534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707903419401576:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.719609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.719833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707903419401591:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.719904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:58.722826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:58.731420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707903419401590:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-10T12:24:58.841125Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707903419401643:2594] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2026-01-10T12:24:59.178044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.214603Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2026-01-10T12:24:59.592151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:59.628678Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2026-01-10T12:25:00.116991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.156714Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2026-01-10T12:25:00.645789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:00.694782Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); 
--!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 2026-01-10T12:25:00.943175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707890534498741:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:00.943242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 700 2026-01-10T12:25:01.131844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:01.172797Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2026-01-10T12:25:01.677844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:01.723733Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, A ... 80, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:05.547874Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7593708961754273645:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:29:05.643420Z node 9 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [9:7593708961754273697:2599] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:06.014953Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [9:7593708961754273745:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-10T12:29:06.015541Z node 9 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=9&id=NTEzYjE1ZWItODBjMDJhNy0zODQ1M2NmOS0yODNhMDlkZQ==, ActorId: [9:7593708961754273738:2353], ActorState: ExecuteState, LegacyTraceId: 01keky3dhsatj61c5eywy10dbh, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-10T12:29:06.038453Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 4110, MsgBus: 28960 2026-01-10T12:29:07.978758Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7593708969815812583:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:07.978866Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:08.001316Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:08.204752Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:08.207580Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7593708969815812554:2081] 1768048147977015 != 1768048147977018 2026-01-10T12:29:08.207676Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:08.233395Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:08.233533Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:08.266222Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:08.363750Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:08.363781Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:08.363792Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:08.363936Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-10T12:29:08.489723Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:08.993426Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:09.154651Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:09.163602Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:29:12.979302Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7593708969815812583:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:12.979400Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:13.953577Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708995585617223:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.953736Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.954357Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708995585617232:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.954467Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.982065Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:14.047732Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708999880584629:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:14.047855Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:14.047947Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708999880584634:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:14.048096Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7593708999880584636:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:14.048167Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:14.055177Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:14.073336Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7593708999880584638:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:29:14.150135Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7593708999880584689:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:14.923064Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7593708999880584749:2364], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-10T12:29:14.923661Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=NzQ5ZjYxMDktNDhjYjYzZWItNzU4YTMzMzctYzVjZjI2MDY=, ActorId: [10:7593708999880584742:2360], ActorState: ExecuteState, LegacyTraceId: 01keky3p856e3met4mvgrnf2v3, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-10T12:29:14.930995Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> Normalizers::CleanUnusedTablesNormalizer >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 |95.9%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2026-01-10T12:29:12.754369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:12.791025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:12.791306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:12.799409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:12.799695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:12.799954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:12.800110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:12.800232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:12.800385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:12.800504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:12.800628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:12.800765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:12.800887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.801022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:12.801171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:12.801293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:12.834310Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:12.834721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:12.834809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:12.835023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:12.835209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:12.835293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:12.835341Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:12.835494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:12.835579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:12.835626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:12.835660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:12.835871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:12.835947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:12.835996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:12.836039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:12.836182Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:12.836246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:12.836301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:12.836334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:12.836394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:12.836440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:12.836475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:12.836535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:12.836577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:12.836606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:12.836844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:12.836976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:12.837014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:12.837151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:12.837198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.837242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.837328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:12.837382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:12.837414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:12.837460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:12.837504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:12.837537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:12.837699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:12.837766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:18.385905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:18.386281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:18.386533Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.386723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.386920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.387202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:18.387427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.387645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.388153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-10T12:29:18.388823Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.036},{"events":["f_ack"],"t":0.037},{"events":["l_ProduceResults","f_Finish"],"t":0.039},{"events":["l_ack","l_processing","l_Finish"],"t":0.04}],"full":{"a":6009813,"name":"_full_task","f":6009813,"d_finished":0,"c":0,"l":6050048,"d":40235},"events":[{"name":"bootstrap","f":6010118,"d_finished":1976,"c":1,"l":6012094,"d":1976},{"a":6048977,"name":"ack","f":6046918,"d_finished":1851,"c":1,"l":6048769,"d":2922},{"a":6048957,"name":"processing","f":6012310,"d_finished":5500,"c":3,"l":6048774,"d":6591},{"name":"ProduceResults","f":6011461,"d_finished":3257,"c":6,"l":6049494,"d":3257},{"a":6049501,"name":"Finish","f":6049501,"d_finished":0,"c":0,"l":6050048,"d":547},{"name":"task_result","f":6012335,"d_finished":3570,"c":2,"l":6046651,"d":3570}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.388926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:18.389545Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.036},{"events":["f_ack"],"t":0.037},{"events":["l_ProduceResults","f_Finish"],"t":0.039},{"events":["l_ack","l_processing","l_Finish"],"t":0.04}],"full":{"a":6009813,"name":"_full_task","f":6009813,"d_finished":0,"c":0,"l":6050789,"d":40976},"events":[{"name":"bootstrap","f":6010118,"d_finished":1976,"c":1,"l":6012094,"d":1976},{"a":6048977,"name":"ack","f":6046918,"d_finished":1851,"c":1,"l":6048769,"d":3663},{"a":6048957,"name":"processing","f":6012310,"d_finished":5500,"c":3,"l":6048774,"d":7332},{"name":"ProduceResults","f":6011461,"d_finished":3257,"c":6,"l":6049494,"d":3257},{"a":6049501,"name":"Finish","f":6049501,"d_finished":0,"c":0,"l":6050789,"d":1288},{"name":"task_result","f":6012335,"d_finished":3570,"c":2,"l":6046651,"d":3570}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:18.389649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:18.343877Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-10T12:29:18.389706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:18.390004Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] Test command err: 2026-01-10T12:29:12.910379Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:12.933687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:12.933850Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:12.939885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2026-01-10T12:29:12.940139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:12.940340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:12.940531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:12.940628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:12.940724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:12.940862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:12.940980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:12.941080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:12.941182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:12.941307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.941446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:12.941576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:12.941684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:12.966262Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:12.966427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2026-01-10T12:29:12.966479Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:29:12.966704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:12.966764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-10T12:29:12.966811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-10T12:29:12.966939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:12.967011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:12.967054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:12.967075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-10T12:29:12.967134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:12.967167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:12.967196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:12.967221Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:12.967336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:12.967375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:12.967403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:12.967420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:12.967491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:12.967539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:12.967572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:12.967593Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:12.967624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:12.967647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:12.967663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:12.967687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:12.967716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:12.967733Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:12.967911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:12.967946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:12.967970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:12.968129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:12.968158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.968179Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:12.968226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:12.968265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:12.968296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=n ... {class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:19.079253Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-10T12:29:19.079283Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:19.079327Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:19.079860Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:19.079990Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.080021Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:19.080181Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-10T12:29:19.080230Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-10T12:29:19.080358Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-10T12:29:19.080487Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.080599Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.080803Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.080917Z 
node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:19.081027Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.081136Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.081443Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2026-01-10T12:29:19.081993Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.053},{"events":["l_task_result"],"t":0.687},{"events":["l_ProduceResults","f_Finish"],"t":0.688},{"events":["l_ack","l_processing","l_Finish"],"t":0.689}],"full":{"a":5881783,"name":"_full_task","f":5881783,"d_finished":0,"c":0,"l":6571031,"d":689248},"events":[{"name":"bootstrap","f":5882038,"d_finished":1340,"c":1,"l":5883378,"d":1340},{"a":6570427,"name":"ack","f":5935523,"d_finished":271537,"c":421,"l":6570360,"d":272141},{"a":6570418,"name":"processing","f":5883546,"d_finished":579405,"c":843,"l":6570363,"d":580018},{"name":"ProduceResults","f":5882945,"d_finished":467572,"c":1266,"l":6570681,"d":467572},{"a":6570691,"name":"Finish","f":6570691,"d_finished":0,"c":0,"l":6571031,"d":340},{"name":"task_result","f":5883575,"d_finished":298625,"c":422,"l":6568870,"d":298625}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.082092Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:19.082570Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.053},{"events":["l_task_result"],"t":0.687},{"events":["l_ProduceResults","f_Finish"],"t":0.688},{"events":["l_ack","l_processing","l_Finish"],"t":0.689}],"full":{"a":5881783,"name":"_full_task","f":5881783,"d_finished":0,"c":0,"l":6571668,"d":689885},"events":[{"name":"bootstrap","f":5882038,"d_finished":1340,"c":1,"l":5883378,"d":1340},{"a":6570427,"name":"ack","f":5935523,"d_finished":271537,"c":421,"l":6570360,"d":272778},{"a":6570418,"name":"processing","f":5883546,"d_finished":579405,"c":843,"l":6570363,"d":580655},{"name":"ProduceResults","f":5882945,"d_finished":467572,"c":1266,"l":6570681,"d":467572},{"a":6570691,"name":"Finish","f":6570691,"d_finished":0,"c":0,"l":6571668,"d":977},{"name":"task_result","f":5883575,"d_finished":298625,"c":422,"l":6568870,"d":298625}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.082645Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:18.390981Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-10T12:29:19.082690Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:19.082847Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2026-01-10T12:29:18.043618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:18.077647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:18.077933Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:18.085502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:18.085765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:18.086022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:18.086145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:18.086252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:18.086352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:18.086510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:18.086652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:18.086781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:18.086908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:18.087035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:18.087166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:18.087295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:18.117527Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:18.117903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:18.117964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:18.118156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:18.118306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:18.118372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:18.118412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:18.118515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:18.118579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:18.118620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:18.118648Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:18.118837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:18.118913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:18.118955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:18.118990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:18.119099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:18.119172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:18.119240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:18.119275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:18.119342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:18.119387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:18.119422Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:18.119464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:18.119497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:18.119528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:18.119739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:18.119848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:18.119879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:18.120008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:18.120050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:18.120100Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:18.120181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:18.120246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:18.120285Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:18.120328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:18.120364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:18.120391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:18.120543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:18.120610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 7184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-10T12:29:19.551550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=1;prepared=1; 2026-01-10T12:29:19.551670Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=4;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{0,12},{1,13},{2,14},{3,15},];}};]};SF:0;PR:0;); 2026-01-10T12:29:19.551701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2026-01-10T12:29:19.551719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:19.551746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:19.551788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-10T12:29:19.551818Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-10T12:29:19.551846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2026-01-10T12:29:19.551874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2026-01-10T12:29:19.551895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=1; 2026-01-10T12:29:19.551927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=2; 2026-01-10T12:29:19.551948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=3; 2026-01-10T12:29:19.552056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.552226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.552422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:19.552570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.552701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.552999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:458:2470] finished for tablet 9437184 2026-01-10T12:29:19.553383Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:454:2466];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":2033024,"name":"_full_task","f":2033024,"d_finished":0,"c":0,"l":2047573,"d":14549},"events":[{"name":"bootstrap","f":2033204,"d_finished":1137,"c":1,"l":2034341,"d":1137},{"a":2046930,"name":"ack","f":2046930,"d_finished":0,"c":0,"l":2047573,"d":643},{"a":2046914,"name":"processing","f":2034477,"d_finished":5321,"c":5,"l":2046801,"d":5980},{"name":"ProduceResults","f":2033965,"d_finished":1467,"c":7,"l":2047256,"d":1467},{"a":2047259,"name":"Finish","f":2047259,"d_finished":0,"c":0,"l":2047573,"d":314},{"name":"task_result","f":2034495,"d_finished":5255,"c":5,"l":2046799,"d":5255}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.553460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:454:2466];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:19.553843Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:454:2466];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":2033024,"name":"_full_task","f":2033024,"d_finished":0,"c":0,"l":2048024,"d":15000},"events":[{"name":"bootstrap","f":2033204,"d_finished":1137,"c":1,"l":2034341,"d":1137},{"a":2046930,"name":"ack","f":2046930,"d_finished":0,"c":0,"l":2048024,"d":1094},{"a":2046914,"name":"processing","f":2034477,"d_finished":5321,"c":5,"l":2046801,"d":6431},{"name":"ProduceResults","f":2033965,"d_finished":1467,"c":7,"l":2047256,"d":1467},{"a":2047259,"name":"Finish","f":2047259,"d_finished":0,"c":0,"l":2048024,"d":765},{"name":"task_result","f":2034495,"d_finished":5255,"c":5,"l":2046799,"d":5255}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:19.553927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:19.536763Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2026-01-10T12:29:19.553965Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:19.554129Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> Normalizers::CleanUnusedTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> EvWrite::WriteWithSplit >> 
TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader >> Normalizers::PortionsNormalizer [GOOD] >> Normalizers::RemoveDeleteFlagNormalizer >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 192 112 28 48 32 24 16 24 56 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately >> TColumnShardTestReadWrite::ReadStale >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] Test command err: 2026-01-10T12:28:57.091822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708926422192156:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:57.092070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:57.323228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:57.427206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:57.427346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:57.429614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:57.507900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:57.540852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:57.691459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:57.691507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:57.691519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:57.691619Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.085435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.097415Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:58.274223Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-10T12:28:58.274289Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-10T12:28:58.274868Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****US1g (63D1D230)) 2026-01-10T12:29:01.097221Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****US1g (63D1D230) () has now permanent error message 'Login state is not available' 2026-01-10T12:29:01.097306Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root/Db1, 2026-01-10T12:29:01.726133Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708945100058894:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:01.726211Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:01.750112Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:01.823395Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:01.827030Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708945100058867:2081] 1768048141724672 != 1768048141724675 2026-01-10T12:29:01.871001Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:01.871094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:01.874493Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:01.951588Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:01.951610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:01.951619Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:01.951722Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:01.954809Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:02.173931Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-10T12:29:02.457763Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:02.464296Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2026-01-10T12:29:02.464340Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:02.464501Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2026-01-10T12:29:02.464527Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2026-01-10T12:29:02.464551Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (5DAB89DE): Token is not in correct format 2026-01-10T12:29:05.231850Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708964465501044:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:05.231904Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:05.247715Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:05.317446Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:05.319249Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708964465501016:2081] 1768048145231087 != 1768048145231090 2026-01-10T12:29:05.330091Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:05.330168Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:05.354487Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:05.428882Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:05.428910Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:05.428917Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:05.429000Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:05.436833Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:05.657270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:05.661522Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:29:05.797596Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:05.807454Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:29:05.807503Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:05.808672Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****LPgg (917EA495) () has now valid token of user1 2026-01-10T12:29:05.808709Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-10T12:29:05.809200Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:06.238136Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:10.232326Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708964465501044:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:10.232472Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:10.235879Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****LPgg (917EA495) 2026-01-10T12:29:10.236284Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****LPgg (917EA495) () has now permanent error message 'User not found' 2026-01-10T12:29:13.238717Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****LPgg (917EA495) 2026-01-10T12:29:16.586955Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593709009695552136:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:16.587090Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:16.650889Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:16.656187Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:16.663901Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593709009695552108:2081] 1768048156586108 != 1768048156586111 2026-01-10T12:29:16.704668Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:16.704754Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:16.710453Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:16.736699Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2026-01-10T12:29:16.736720Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:16.736727Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:16.736803Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:16.816180Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:16.956887Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:16.966961Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:29:16.967028Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5c46407d50] Connect to grpc://localhost:23822 2026-01-10T12:29:16.971125Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c46407d50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:29:16.980001Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c46407d50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:29:16.980220Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-10T12:29:16.981698Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5c4630b2d0] Connect to grpc://localhost:6011 2026-01-10T12:29:16.982673Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c4630b2d0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-10T12:29:16.990067Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c4630b2d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-10T12:29:16.990383Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-10T12:29:16.990949Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.write) 2026-01-10T12:29:16.991129Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c46407d50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:16.993093Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c46407d50] Response BulkAuthorizeResponse { results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-10T12:29:16.993227Z node 4 :TICKET_PARSER 
TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "" 2026-01-10T12:29:16.993277Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-10T12:29:16.994149Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-10T12:29:16.994347Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c46407d50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:16.995826Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c46407d50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:29:16.995913Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-10T12:29:16.996115Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-10T12:29:20.443634Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593709028035852972:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:20.443685Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:20.457568Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:20.532042Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:20.543271Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593709028035852944:2081] 1768048160442903 != 1768048160442906 2026-01-10T12:29:20.572103Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:20.572255Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:20.579442Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:20.625646Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:20.625677Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:20.625686Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:20.625776Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:20.668315Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:20.855343Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:20.863597Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2026-01-10T12:29:20.863661Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5c46365bd0] Connect to grpc://localhost:9879 2026-01-10T12:29:20.864885Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c46365bd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2026-01-10T12:29:20.872994Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c46365bd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2026-01-10T12:29:20.873178Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2026-01-10T12:29:20.873208Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2026-01-10T12:29:20.873222Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2026-01-10T12:29:20.873245Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2026-01-10T12:29:20.873260Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-10T12:29:20.873412Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5c46365850] Connect to grpc://localhost:20798 2026-01-10T12:29:20.874156Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5c46365850] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-10T12:29:20.880816Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5c46365850] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-10T12:29:20.881108Z node 5 :TICKET_PARSER DEBUG: 
ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2026-01-10T12:29:09.343439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.368215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.368522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.381086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-10T12:29:09.381295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:09.381462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.381610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.381686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.381770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.381872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.381952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.382017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.382087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.382191Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.382280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.382367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.382438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.408260Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.408494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-10T12:29:09.408559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:29:09.408905Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-10T12:29:09.410330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:09.410444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-10T12:29:09.410506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-10T12:29:09.410676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.410828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.410865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-10T12:29:09.410987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.411126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.411162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.411353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.411481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.411520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.411630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.411746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.411778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.411830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.411870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.411922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.411980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.412034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.412141Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.412362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.412415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.412448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.412591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.412669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.412703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.412758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.412798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksM ... 
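The TTxInitSchema/TTxUpdateSchema lines above show the shard registering a fixed set of normalizers (EmptyPortionsCleaner, Granules, Chunks, TablesCleaner, CleanGranuleId, ...) and then executing them strictly one after another, emitting normalizer_init, a per-normalizer "N chunks found" summary, normalizer_finished and finally normalizer_switched to the next entry. A compact sketch of that sequential pipeline pattern is below, under the assumption that each normalizer only needs to report how much work it found; TNormalizer and RunNormalizers are hypothetical names, not the real ydb classes.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical normalizer: a name, its sequence id and a step that
// returns how many chunks it touched.
struct TNormalizer {
    std::string ClassName;
    int SeqId;
    std::function<size_t()> Run;
};

// Run registered normalizers in order, mirroring the
// normalizer_init / normalizer_finished / normalizer_switched log pattern.
void RunNormalizers(const std::vector<TNormalizer>& normalizers) {
    for (size_t i = 0; i < normalizers.size(); ++i) {
        const auto& n = normalizers[i];
        std::cout << "normalizer_init;seq_id=" << n.SeqId << ";type=" << n.ClassName << "\n";
        const size_t chunks = n.Run();
        std::cout << n.ClassName << ";message=" << chunks << " chunks found\n";
        std::cout << "normalizer_finished;description=CLASS_NAME=" << n.ClassName << "\n";
        if (i + 1 < normalizers.size()) {
            std::cout << "normalizer_switched;description=CLASS_NAME="
                      << normalizers[i + 1].ClassName << "\n";
        }
    }
}

int main() {
    RunNormalizers({
        {"Granules", 1, [] { return size_t(0); }},
        {"Chunks", 2, [] { return size_t(0); }},
        {"TablesCleaner", 4, [] { return size_t(0); }},
    });
    return 0;
}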
ending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:22.868656Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-10T12:29:22.868692Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:22.868727Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:22.869266Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:22.869415Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.869453Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:22.869581Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-10T12:29:22.869638Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-10T12:29:22.869805Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-10T12:29:22.869949Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.870080Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.870243Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.870389Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:22.870508Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.870701Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
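The scan trace above follows a pull model: each TEvScanDataAck from the compute actor carries a budget (limits:(bytes=8388608;chunks=1)), the scan actor extracts ready results up to that budget, sends them with send_data, and then waits for the next ack until the iterator reports "scan iterator is finished". A condensed sketch of that ack-driven loop over an in-memory queue of result batches is below; TBatch and PumpOnAck are illustrative names and the budget handling is an assumption, not the shard's actual flow-control code.

#include <cstddef>
#include <deque>
#include <iostream>
#include <vector>

// Hypothetical ready-result batch, standing in for the
// ready_results:(count:...;records_count:...) entries in the trace.
struct TBatch {
    size_t Bytes;
    size_t Rows;
};

// On each ack, emit batches until either the byte budget or the chunk budget
// from the ack is exhausted, mirroring limits:(bytes=8388608;chunks=1).
std::vector<TBatch> PumpOnAck(std::deque<TBatch>& ready, size_t byteLimit, size_t chunkLimit) {
    std::vector<TBatch> out;
    size_t bytesUsed = 0;
    while (!ready.empty() && out.size() < chunkLimit &&
           bytesUsed + ready.front().Bytes <= byteLimit) {
        bytesUsed += ready.front().Bytes;
        out.push_back(ready.front());
        ready.pop_front();
    }
    if (out.empty() && !ready.empty() && chunkLimit > 0) {
        // An oversized batch is still sent on its own so the scan can make progress.
        out.push_back(ready.front());
        ready.pop_front();
    }
    return out;
}

int main() {
    std::deque<TBatch> ready{{130200, 1085}, {64000, 512}};
    while (!ready.empty()) {
        // Each iteration models one TEvScanDataAck from the compute actor.
        for (const auto& batch : PumpOnAck(ready, 8388608, 1)) {
            std::cout << "send_data;bytes=" << batch.Bytes << ";rows=" << batch.Rows << "\n";
        }
    }
    std::cout << "scan iterator is finished\n";
    return 0;
}

Sending an oversized batch on its own rather than stalling is a guess at the intended behaviour; the log only shows batches well under the 8 MiB budget.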
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.871069Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2026-01-10T12:29:22.871657Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.04},{"events":["l_task_result"],"t":0.677},{"events":["l_ProduceResults","f_Finish"],"t":0.679},{"events":["l_ack","l_processing","l_Finish"],"t":0.68}],"full":{"a":13441298,"name":"_full_task","f":13441298,"d_finished":0,"c":0,"l":14121358,"d":680060},"events":[{"name":"bootstrap","f":13441520,"d_finished":1340,"c":1,"l":13442860,"d":1340},{"a":14120588,"name":"ack","f":13481787,"d_finished":276341,"c":421,"l":14120510,"d":277111},{"a":14120576,"name":"processing","f":13443028,"d_finished":587162,"c":843,"l":14120513,"d":587944},{"name":"ProduceResults","f":13442438,"d_finished":479029,"c":1266,"l":14120941,"d":479029},{"a":14120953,"name":"Finish","f":14120953,"d_finished":0,"c":0,"l":14121358,"d":405},{"name":"task_result","f":13443053,"d_finished":301643,"c":422,"l":14118962,"d":301643}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.871748Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:22.872302Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.04},{"events":["l_task_result"],"t":0.677},{"events":["l_ProduceResults","f_Finish"],"t":0.679},{"events":["l_ack","l_processing","l_Finish"],"t":0.68}],"full":{"a":13441298,"name":"_full_task","f":13441298,"d_finished":0,"c":0,"l":14122021,"d":680723},"events":[{"name":"bootstrap","f":13441520,"d_finished":1340,"c":1,"l":13442860,"d":1340},{"a":14120588,"name":"ack","f":13481787,"d_finished":276341,"c":421,"l":14120510,"d":277774},{"a":14120576,"name":"processing","f":13443028,"d_finished":587162,"c":843,"l":14120513,"d":588607},{"name":"ProduceResults","f":13442438,"d_finished":479029,"c":1266,"l":14120941,"d":479029},{"a":14120953,"name":"Finish","f":14120953,"d_finished":0,"c":0,"l":14122021,"d":1068},{"name":"task_result","f":13443053,"d_finished":301643,"c":422,"l":14118962,"d":301643}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:22.872394Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:22.189872Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-10T12:29:22.872444Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:22.872618Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2026-01-10T12:29:17.389520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:17.411557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:17.411771Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:17.417632Z node 1 
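The two stats blobs above describe the same scan from both ends of the actor pair: the "t" values are offsets in seconds from the start of the task, while each named stage carries "f" (first), "l" (last) and "d"/"d_finished" timings plus a call count "c"; the _full_task duration d=680060 lining up with the t≈0.68 finish mark suggests the raw timings are microseconds. A small sketch that turns one such stage record into readable numbers follows; TStageStat is an assumed simplification of the JSON shape, not a parser for it.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Assumed simplification of one entry from the "events" array in the scan stats;
// timings are read as microseconds, "c" as the number of invocations.
struct TStageStat {
    std::string Name;
    uint64_t First;     // "f"
    uint64_t Last;      // "l"
    uint64_t Duration;  // "d_finished"
    uint64_t Count;     // "c"
};

void Report(const std::vector<TStageStat>& stages, uint64_t taskStart) {
    for (const auto& s : stages) {
        double startSec = (s.First - taskStart) / 1e6; // matches the "t" offsets
        double totalMs = s.Duration / 1e3;
        double avgUs = s.Count ? double(s.Duration) / double(s.Count) : 0.0;
        std::cout << s.Name << ": starts at +" << startSec << " s, spent " << totalMs
                  << " ms across " << s.Count << " calls (avg " << avgUs << " us)\n";
    }
}

int main() {
    // Values copied from the "ack" and "task_result" entries in the stats above.
    Report({{"ack", 13481787, 14120510, 276341, 421},
            {"task_result", 13443053, 14118962, 301643, 422}},
           /*taskStart=*/13441298);
    return 0;
}

Run against the "ack" entry this prints a start offset of roughly +0.04 s, matching the f_ack event at t=0.04 in the stats above.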
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:17.417871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:17.418066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:17.418133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:17.418204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:17.418289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:17.418362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:17.418463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:17.418538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:17.418609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.418707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:17.418823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:17.418954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:17.439755Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:17.440006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 
2026-01-10T12:29:17.440048Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:17.440216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:17.440344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:17.440394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:17.440425Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:17.440516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:17.440568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:17.440596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:17.440615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:17.440748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:17.440797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:17.440842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:17.440868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:17.440940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:17.440991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:17.441036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:17.441069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:17.441118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:17.441155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:17.441196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:17.441241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:17.441267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:17.441287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:17.441453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:17.441589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:17.441613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:17.441692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:17.441720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.441741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.441781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:17.441843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:17.441867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:17.441903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
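Unlike the first shard in this output, which starts with last_saved_id=NO_VALUE_OPTIONAL, the shards above report normalization_start;last_saved_id=0, and every normalizer carries a monotonically increasing seq_id (1, 2, 4, 6, 9, 11, 15, 16, 17, ...). Read together, that looks like a resumable chain: persist the seq_id of the last finished normalizer and skip everything at or below it after a restart. That reading is an inference from the log, so the sketch below is only a generic resume-from-saved-id loop; TStep and SaveProgress are hypothetical, not the shard's real persistence layer.

#include <iostream>
#include <optional>
#include <string>
#include <vector>

struct TStep {
    int SeqId;             // monotonically increasing, gaps allowed (1, 2, 4, 6, ...)
    std::string ClassName;
};

// Hypothetical persistence hook: in a real tablet this would be a local-db write.
void SaveProgress(int seqId) {
    std::cout << "saved last_saved_id=" << seqId << "\n";
}

// Run only the steps whose seq_id is greater than the last recorded one,
// so a restarted tablet does not repeat finished normalizers.
void Resume(const std::vector<TStep>& steps, std::optional<int> lastSavedId) {
    for (const auto& step : steps) {
        if (lastSavedId && step.SeqId <= *lastSavedId) {
            continue; // already normalized before the restart
        }
        std::cout << "running " << step.ClassName << " (seq_id=" << step.SeqId << ")\n";
        SaveProgress(step.SeqId);
    }
}

int main() {
    std::vector<TStep> steps{{1, "Granules"}, {2, "Chunks"}, {4, "TablesCleaner"},
                             {6, "CleanGranuleId"}, {9, "GCCountersNormalizer"}};
    Resume(steps, std::nullopt); // first boot: everything runs
    Resume(steps, 4);            // after a restart: only seq_id > 4 runs
    return 0;
}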
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:17.441950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:17.441986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:17.442138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:17.442190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"i
nc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id
":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"
count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-10T12:29:23.501279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone >> TStorageBalanceTest::TestScenario3 [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2026-01-10T12:29:25.324099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:25.347079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:25.347268Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:25.353875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:25.354099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:25.354335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:25.354485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:25.354620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:25.354735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:25.354846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:25.354964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:25.355086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:25.355245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.355403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:25.355549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:25.355677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:25.383309Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-10T12:29:25.383568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:25.383645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:25.383793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:25.383957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:25.384029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:25.384067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:25.384179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:25.384242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:25.384274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:25.384304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:25.384440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:25.384499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:25.384533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:25.384560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:25.384634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:25.384675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:25.384702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:25.384735Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:25.384778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:25.384809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:25.384828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:25.384859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:25.384884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:25.384903Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:25.385110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:25.385195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:25.385218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:25.385304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:25.385332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.385350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.385408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:25.385450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:25.385471Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:25.385519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:25.385544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:25.385566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:25.385705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:25.385751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:29:26.099769Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-10T12:29:26.100109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:26.100168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:26.100244Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:26.100286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:26.100336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:29:26.116226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:26.116318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:26.116375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:26.116485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:26.116961Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 1 version: {1768047806308:max} readable: {1768048166308:max} at tablet 9437184 2026-01-10T12:29:26.128992Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2026-01-10T12:29:26.130927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:17;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2026-01-10T12:29:26.133519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-10T12:29:26.133699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-10T12:29:26.136085Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6},{"from":8},{"from":10},{"from":12},{"from":14},{"from":16},{"from":18},{"from":20},{"from":22},{"from":24},{"from":26},{"from":28}]},{"owner_id":18,"inputs":[{"from":29}]},{"owner_id":2,"inputs":[{"from":29}]},{"owner_id":20,"inputs":[{"from":29}]},{"owner_id":4,"inputs":[{"from":29}]},{"owner_id":22,"inputs":[{"from":29}]},{"owner_id":6,"inputs":[{"from":29}]},{"owner_id":24,"inputs":[{"from":29}]},{"owner_id":8,"inputs":[{"from":29}]},{"owner_id":26,"inputs":[{"from":29}]},{"owner_id":10,"inputs":[{"from":29}]},{"owner_id":28,"inputs":[{"from":29}]},{"owner_id":29,"inputs":[{"from":30}]},{"owner_id":12,"inputs":[{"from":29}]},{"owner_id":30,"inputs":[]},{"owner_id":14,"inputs":[{"from":29}]},{"owner_id":16,"inputs":[{"from":29}]}],"nodes":{"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":33,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":33,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":33,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043","t":"Projection"},"w":462,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":33,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":33,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":33,"id":16},"24":{"p":{"i":"4294967041","p":{"address":{"name":"_yql_tx_id","id":4294967041}},"o":"4294967041","t":"AssembleOriginalData"},"w":33,"id":24},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":33,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":33,"id":10},"29":{"p":{"i":"0","p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"4294967040,4294967041,1,4294967042,2,4294967043,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":28,"id":29},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":33,"id":6},"30":{"p":{"p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":30},"22":{"p":{"i":"4294967040","p":{"address":{"name":"_yql_plan_step","id":4294967040}},"o":"4294967040","t":"AssembleOriginalData"},"w":33,"id":2
2},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":33,"id":12},"28":{"p":{"i":"4294967043","p":{"address":{"name":"_yql_delete_flag","id":4294967043}},"o":"4294967043","t":"AssembleOriginalData"},"w":33,"id":28},"26":{"p":{"i":"4294967042","p":{"address":{"name":"_yql_write_id","id":4294967042}},"o":"4294967042","t":"AssembleOriginalData"},"w":33,"id":26}}}; 2026-01-10T12:29:26.138033Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1768047806308:max}. CS min read snapshot: {1768047866308:max}. now: 2026-01-10T12:29:26.137962Z; 2026-01-10T12:29:26.152796Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768047806308:max} readable: {1768048166308:max} at tablet 9437184 2026-01-10T12:29:26.165070Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:29:26.165324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-10T12:29:26.165411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-10T12:29:26.166230Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2026-01-10T12:29:26.168111Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768047806308:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1768047806308:max}. CS min read snapshot: {1768047866308:max}. 
now: 2026-01-10T12:29:26.168033Z; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2026-01-10T12:29:21.206169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:21.249436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:21.249743Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:21.259012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:21.259324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:21.259608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:21.259751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:21.259879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:21.259994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:21.260200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:21.260358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:21.260497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:21.260632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:21.260799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:21.260954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:21.261081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:21.297935Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:21.298363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:21.298476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:21.298690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:21.298888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:21.298973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:21.299025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:21.299168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:21.299256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:21.299311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:21.299355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:21.299574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:21.299660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:21.299719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:21.299767Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:21.299896Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:21.299968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:21.300026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:21.300114Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:21.300182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:21.300234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:21.300273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:21.300324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:21.300366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:21.300437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:21.300781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:21.300905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:21.300944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:21.301271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:21.301331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:21.301368Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:21.301440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:21.301510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:21.301546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:21.301633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:21.301687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:21.301754Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:21.301930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:21.302004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"
i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk
":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]}
,"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-10T12:29:26.045189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:29:10.147612Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.147980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.296532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:10.302204Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:10.312390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:10.312960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:10.313063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:10.315056Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:10.315401Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:10.315448Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:10.873382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:11.019162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.019332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.020261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.020339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.065235Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:29:11.066306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.067717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.150900Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:11.199079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:11.850054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:11.916011Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:11.917474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.263919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:13.133929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1915:3234], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.134082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1926:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.134218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.135023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1929:3242], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.136385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.144538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:13.698847Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1930:3243], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:29:13.868580Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2040:3321] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:14.225838Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-10T12:29:14.226991Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-10T12:29:14.228453Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:831} ActorId: [1:2066:3232] TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-10T12:29:14.228564Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:846} ActorId: [1:2066:3232] TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-10T12:29:14.229535Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710660. Resolved key sets: 1 2026-01-10T12:29:14.229740Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-10T12:29:14.235860Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-10T12:29:14.242035Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:2069:3232] 2026-01-10T12:29:14.242124Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710660. 
Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:2069:3232], channels: 0 2026-01-10T12:29:14.243871Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2740} ActorId: [1:2066:3232] TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:29:14.243971Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2744} ActorId: [1:2066:3232] TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb://session/3?node_id=1&id=NDhiMTE0NTMtZWFkNDBmMGEtM2UxY2U2YzktNGNhYjRlMTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-10T12:29:14.244029Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710660. Ctx: { TraceId: 01keky3mh8e5c6k3t5q4544w5w, Database: , SessionId: ydb:// ... tx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CT 1, CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.782066Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitSnapshotState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.782147Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitSnapshotState immediate# true trace_id# 2026-01-10T12:29:26.784727Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2124:3363] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:29:26.784806Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CT 1, CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.784861Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2116:3363] TxId: 281474976710663. 
Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.785466Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1040} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing task TaskId# 1 ComputeActor# [4:2125:2475] trace_id# 2026-01-10T12:29:26.785521Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [4:2125:2475] 2026-01-10T12:29:26.785595Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:908: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task: 1, output channelId: 1, dst task: 2, at actor [3:2124:3363] 2026-01-10T12:29:26.785654Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:2125:2475], channels: 1 2026-01-10T12:29:26.785713Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2124:3363], channels: 1 2026-01-10T12:29:26.786112Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2125:2475] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:29:26.786164Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2125:2475], CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.786204Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2125:2475], CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.786954Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2125:2475] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 733 Tasks { TaskId: 1 CpuTimeUs: 394 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 384 HostName: "ghrun-tjszgfszoq" NodeId: 4 CreateTimeMs: 1768048166783 CurrentWaitInputTimeUs: 20 UpdateTimeMs: 1768048166784 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:29:26.787025Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2125:2475], CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.787081Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2125:2475], CA [3:2124:3363], trace_id# 2026-01-10T12:29:26.799785Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2124:3363] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 978 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 701 FinishTimeMs: 1768048166798 InputRows: 3 InputBytes: 13 ComputeCpuTimeUs: 143 BuildCpuTimeUs: 558 HostName: "ghrun-tjszgfszoq" NodeId: 3 CreateTimeMs: 1768048166782 UpdateTimeMs: 1768048166798 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:29:26.799913Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2124:3363] 2026-01-10T12:29:26.799981Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2125:2475], trace_id# 2026-01-10T12:29:26.800021Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2116:3363] TxId: 281474976710663. 
Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:2125:2475], trace_id# 2026-01-10T12:29:26.800596Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2125:2475] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1469 DurationUs: 7000 Tasks { TaskId: 1 CpuTimeUs: 568 FinishTimeMs: 1768048166799 OutputRows: 3 OutputBytes: 13 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ResultRows: 3 ResultBytes: 13 ComputeCpuTimeUs: 184 BuildCpuTimeUs: 384 WaitInputTimeUs: 5591 HostName: "ghrun-tjszgfszoq" NodeId: 4 StartTimeMs: 1768048166792 CreateTimeMs: 1768048166783 UpdateTimeMs: 1768048166799 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:29:26.800650Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:2125:2475] 2026-01-10T12:29:26.800738Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:29:26.800800Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-10T12:29:26.800855Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [3:2116:3363] TxId: 281474976710663. Ctx: { TraceId: 01keky41q66w29ba89rpyrdfp5, Database: , SessionId: ydb://session/3?node_id=3&id=YTk4NmU1YzItNTEyN2M5MjEtNWE5MmFiNTUtNzAzMGQwYjU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002447s ReadRows: 3 ReadBytes: 24 RequestUnits# 3 ForceFlag# true trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2026-01-10T12:25:59.256927Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.296404Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.301295Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.307837Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.311837Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:25:59.315586Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:25:59.315685Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.316794Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2026-01-10T12:25:59.316844Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.316962Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.317100Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.333345Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.333417Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.336155Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.336369Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.336509Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 
2026-01-10T12:25:59.336644Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.336780Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.336935Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.337075Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.337105Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.337203Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 2026-01-10T12:25:59.337240Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2026-01-10T12:25:59.337289Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.337351Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.340381Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.340586Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:25:59.343647Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:25:59.343808Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:25:59.344187Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:25:59.344462Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:25:59.345376Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2026-01-10T12:25:59.345416Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:25:59.345485Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:25:59.345590Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:25:59.356537Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:25:59.356611Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:25:59.358611Z node 2 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.358767Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.358907Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.359062Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.359222Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.359364Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.359501Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:25:59.359528Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:25:59.359599Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2026-01-10T12:25:59.359631Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2026-01-10T12:25:59.359678Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:25:59.359744Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:25:59.360195Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:25:59.362000Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:25:59.364251Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2026-01-10T12:25:59.364331Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.364508Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:25:59.381046Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:25:59.381131Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.389142Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.389191Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:25:59.392520Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.392583Z node 1 :BS_NODE DEBUG: 
{NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:25:59.393092Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.393135Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:98:2089] 2026-01-10T12:25:59.393164Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:98:2089] 2026-01-10T12:25:59.393242Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:25:59.397839Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2026-01-10T12:25:59.397900Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:25:59.399480Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:25:59.400179Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:25:59.400274Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:25:59.400344Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:25:59.400472Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:54:2064] 2026-01-10T12:25 ... 
6371002f6c] Result# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:29:24.997807Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2543b6371002f6c] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:29:24.998895Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:487:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:29:24.999051Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} commited cookie 1 for step 487 2026-01-10T12:29:25.001090Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-10T12:29:25.001152Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:29:25.001365Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{986, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-10T12:29:25.001419Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:488} Tx{1477, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:29:25.001536Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1353:2259] 2026-01-10T12:29:25.001571Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1353:2259] 2026-01-10T12:29:25.001623Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1289:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.048) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.048) 2026-01-10T12:29:25.103830Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-10T12:29:25.103915Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} 
took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:29:25.104047Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116801152}: tablet 72075186224037972 wasn't changed 2026-01-10T12:29:25.104105Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116801152}: tablet 72075186224037972 skipped channel 0 2026-01-10T12:29:25.104186Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116801152}: tablet 72075186224037972 skipped channel 1 2026-01-10T12:29:25.104228Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116801152}: tablet 72075186224037972 skipped channel 2 2026-01-10T12:29:25.104304Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137087116801152}(72075186224037972)::Execute - TryToBoot was not successfull 2026-01-10T12:29:25.104391Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{987, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-10T12:29:25.104445Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1478, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:29:25.123465Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [d04b90266beab287] bootstrap ActorId# [20:11722:4474] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:488:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:29:25.123605Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [d04b90266beab287] Id# [72057594037927937:2:488:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:29:25.123654Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [d04b90266beab287] restore Id# [72057594037927937:2:488:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:29:25.123705Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [d04b90266beab287] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG33 2026-01-10T12:29:25.123743Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [d04b90266beab287] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:488:0:0:246:1] Marker# BPG32 2026-01-10T12:29:25.123861Z node 20 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [20:329:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:488:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:29:25.127781Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [d04b90266beab287] received {EvVPutResult Status# OK ID# [72057594037927937:2:488:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 505 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 506 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-10T12:29:25.127893Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d04b90266beab287] Result# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:29:25.127942Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [d04b90266beab287] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:29:25.128756Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:29:25.128907Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} commited cookie 1 for step 488 2026-01-10T12:29:25.130490Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-10T12:29:25.130544Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:29:25.130751Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{988, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-10T12:29:25.130798Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:29:25.130906Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1353:2259] 2026-01-10T12:29:25.130940Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1353:2259] 2026-01-10T12:29:25.130988Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1289:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.048) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.048) 2026-01-10T12:29:25.233415Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-10T12:29:25.233496Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:29:25.233625Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: 
HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116799808}: tablet 72075186224037982 wasn't changed 2026-01-10T12:29:25.233665Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116799808}: tablet 72075186224037982 skipped channel 0 2026-01-10T12:29:25.233747Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116799808}: tablet 72075186224037982 skipped channel 1 2026-01-10T12:29:25.233784Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137087116799808}: tablet 72075186224037982 skipped channel 2 2026-01-10T12:29:25.233879Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137087116799808}(72075186224037982)::Execute - TryToBoot was not successfull 2026-01-10T12:29:25.233952Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{989, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-10T12:29:25.234004Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] Test command err: 2026-01-10T12:29:21.148213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:21.183090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:21.183334Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:21.190880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-10T12:29:21.191164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2026-01-10T12:29:21.191299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.191510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:21.191705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:21.191823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:21.191926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:21.192097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:21.192229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:21.192343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:21.192474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:21.192616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:21.192757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:21.192870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:21.192981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:21.227409Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:21.227600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=14;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-10T12:29:21.227664Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.228288Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-10T12:29:21.228451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.228537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.228586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.228871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2026-01-10T12:29:21.228960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2026-01-10T12:29:21.229050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2026-01-10T12:29:21.229137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2026-01-10T12:29:21.229235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:21.229310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-10T12:29:21.229354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-10T12:29:21.229518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:21.229631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:21.229681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:21.229730Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-10T12:29:21.229847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:21.229927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:21.229974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:21.230008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:21.230219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:21.230285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:21.230326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:21.230356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:21.230452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:21.230508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:21.230556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:21.230661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:21.230721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:21.230791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:21.230826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:21.230876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:21.230916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:21.230955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:21.231183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... 
ternal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:26.687875Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-10T12:29:26.687909Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:26.687943Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:26.688517Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:26.688686Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.688735Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:26.688847Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-10T12:29:26.688895Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-10T12:29:26.689083Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-10T12:29:26.689226Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.689353Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.689513Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.689651Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:26.689765Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.689885Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.690216Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2026-01-10T12:29:26.690743Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.054},{"events":["l_task_result"],"t":0.723},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.725}],"full":{"a":5247293,"name":"_full_task","f":5247293,"d_finished":0,"c":0,"l":5972890,"d":725597},"events":[{"name":"bootstrap","f":5247539,"d_finished":1261,"c":1,"l":5248800,"d":1261},{"a":5972237,"name":"ack","f":5301802,"d_finished":289738,"c":421,"l":5972149,"d":290391},{"a":5972226,"name":"processing","f":5249001,"d_finished":617159,"c":843,"l":5972152,"d":617823},{"name":"ProduceResults","f":5248362,"d_finished":500659,"c":1266,"l":5972509,"d":500659},{"a":5972514,"name":"Finish","f":5972514,"d_finished":0,"c":0,"l":5972890,"d":376},{"name":"task_result","f":5249016,"d_finished":317568,"c":422,"l":5970566,"d":317568}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.690850Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:26.691374Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.054},{"events":["l_task_result"],"t":0.723},{"events":["l_ProduceResults","f_Finish"],"t":0.725},{"events":["l_ack","l_processing","l_Finish"],"t":0.726}],"full":{"a":5247293,"name":"_full_task","f":5247293,"d_finished":0,"c":0,"l":5973502,"d":726209},"events":[{"name":"bootstrap","f":5247539,"d_finished":1261,"c":1,"l":5248800,"d":1261},{"a":5972237,"name":"ack","f":5301802,"d_finished":289738,"c":421,"l":5972149,"d":291003},{"a":5972226,"name":"processing","f":5249001,"d_finished":617159,"c":843,"l":5972152,"d":618435},{"name":"ProduceResults","f":5248362,"d_finished":500659,"c":1266,"l":5972509,"d":500659},{"a":5972514,"name":"Finish","f":5972514,"d_finished":0,"c":0,"l":5973502,"d":988},{"name":"task_result","f":5249016,"d_finished":317568,"c":422,"l":5970566,"d":317568}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:26.691475Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:25.963427Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-10T12:29:26.691526Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:26.691704Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> EvWrite::WriteWithSplit [GOOD] >> Normalizers::ChunksV0MetaNormalizer >> TColumnShardTestReadWrite::WriteReadExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-10T12:29:24.308581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 
2026-01-10T12:29:24.330865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:24.331035Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:24.336583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:24.336816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:24.337029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:24.337110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:24.337176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:24.337240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:24.337308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:24.337404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:24.337484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:24.337552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.337617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:24.337692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:24.337802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:24.359648Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:24.360043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:24.360139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:24.360279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:24.360411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:24.360467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:24.360504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:24.360585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:24.360634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:24.360663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:24.360683Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:24.360794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:24.360830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:24.360872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:24.360898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:24.360979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:24.361019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:24.361052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:24.361070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:24.361104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:24.361182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:24.361211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:24.361246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:24.361273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:24.361327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:24.361493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:24.361529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:24.361547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:24.361616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:24.361662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.361685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.361732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:24.361772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:24.361795Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:24.361842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:24.361866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:24.361883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:24.361983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:24.362027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... ;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:27.738956Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-10T12:29:27.738997Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:27.739037Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:27.739511Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:27.739713Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.739765Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:27.739889Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-10T12:29:27.739945Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-10T12:29:27.740234Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-10T12:29:27.740387Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.740519Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.740659Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.740911Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:27.741031Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.741148Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.741352Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-10T12:29:27.741770Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":3890902,"name":"_full_task","f":3890902,"d_finished":0,"c":0,"l":3900867,"d":9965},"events":[{"name":"bootstrap","f":3891182,"d_finished":1076,"c":1,"l":3892258,"d":1076},{"a":3900356,"name":"ack","f":3898953,"d_finished":1200,"c":1,"l":3900153,"d":1711},{"a":3900345,"name":"processing","f":3892417,"d_finished":2916,"c":3,"l":3900156,"d":3438},{"name":"ProduceResults","f":3891907,"d_finished":1962,"c":6,"l":3900632,"d":1962},{"a":3900636,"name":"Finish","f":3900636,"d_finished":0,"c":0,"l":3900867,"d":231},{"name":"task_result","f":3892432,"d_finished":1660,"c":2,"l":3898522,"d":1660}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.741879Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:27.742373Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":3890902,"name":"_full_task","f":3890902,"d_finished":0,"c":0,"l":3901395,"d":10493},"events":[{"name":"bootstrap","f":3891182,"d_finished":1076,"c":1,"l":3892258,"d":1076},{"a":3900356,"name":"ack","f":3898953,"d_finished":1200,"c":1,"l":3900153,"d":2239},{"a":3900345,"name":"processing","f":3892417,"d_finished":2916,"c":3,"l":3900156,"d":3966},{"name":"ProduceResults","f":3891907,"d_finished":1962,"c":6,"l":3900632,"d":1962},{"a":3900636,"name":"Finish","f":3900636,"d_finished":0,"c":0,"l":3901395,"d":759},{"name":"task_result","f":3892432,"d_finished":1660,"c":2,"l":3898522,"d":1660}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:27.742475Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:27.729334Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-10T12:29:27.742518Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:27.742646Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:29:10.606943Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.607144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:10.700066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:10.701063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:10.708874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:10.709235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:10.709377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:10.710785Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:10.711016Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:10.711180Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:11.083567Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:11.191777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.191905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.192741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:11.192822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:11.237666Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:29:11.238629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.239210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:11.324801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:11.339093Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:11.998261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:12.067453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.067621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.393322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:13.290895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1912:3226], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.291055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1921:3231], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.291605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.292458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1928:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.292658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:13.298313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:13.884916Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1926:3234], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:29:14.106532Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2037:3312] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:14.529998Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-10T12:29:14.530192Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-01-10T12:29:14.530262Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-10T12:29:14.530320Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-10T12:29:14.530400Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-10T12:29:14.543425Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:291} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-10T12:29:14.558122Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-10T12:29:14.558218Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mp81ye7cp16qekff713, Database: , SessionId: ydb://session/3?node_id=1&id=YTcxNzMxM2QtMzRiOTNlYy1lODQxOGYyYS1mMjAwYjQ5ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 299.422358s cancel_after# trace_id# 2026-01-10T12:29:14.558261Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mp81ye7cp16qekff713, Database: , SessionId: ydb://session/3?node_id=1&id=YTcxNzMxM2QtMzRiOTNlYy1lODQxOGYyYS1mMjAwYjQ5ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-10T12:29:14.558304Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mp81ye7cp16qekff713, Database: , SessionId: ydb://session/3?node_id=1&id=YTcxNzMxM2QtMzRiOTNlYy1lODQxOGYyYS1mMjAwYjQ5ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-10T12:29:14.558358Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-10T12:29:14.558895Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:291} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mp81ye7cp16qekff713, Database: , SessionId: ydb://session/3?node_id=1&id=YTcxNzMxM2QtMzRiOTNlYy1lODQxOGYyYS1mMjAwYjQ5ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-10T12:29:14.559148Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-10T12:29:14.559254Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky3mp81ye7cp16qekff713, Database: , SessionId: ydb://session/3?node_id=1&id=YTcxNzMxM2QtMzRiOTNlYy1lODQxOGYyYS1mMjAwYjQ5ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-10T12:29:14.559566Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:831} ActorId: [1:2063:3224] TxId: 281474976715660. Ctx: { TraceId: 01keky ... Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-10T12:29:27.906529Z node 3 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2744} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-10T12:29:27.906576Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [3:2330:3466] 2026-01-10T12:29:27.906640Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2330:3466], channels: 0 2026-01-10T12:29:27.906714Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2330:3466], trace_id# 2026-01-10T12:29:27.906775Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2327:3466] TxId: 281474976710683. 
Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2330:3466], trace_id# 2026-01-10T12:29:27.906822Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2344} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-10T12:29:27.907683Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2330:3466] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-10T12:29:27.907770Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:879} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2330:3466], trace_id# 2026-01-10T12:29:27.907850Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2330:3466], trace_id# 2026-01-10T12:29:27.908999Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2330:3466] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 601 Tasks { TaskId: 1 CpuTimeUs: 109 FinishTimeMs: 1768048167908 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 24 BuildCpuTimeUs: 85 HostName: "ghrun-tjszgfszoq" NodeId: 3 CreateTimeMs: 1768048167907 UpdateTimeMs: 1768048167908 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-10T12:29:27.909058Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2330:3466] 2026-01-10T12:29:27.909172Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [3:2326:3466] trace_id# 2026-01-10T12:29:27.909245Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1119} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000601s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-10T12:29:27.923766Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [3:2332:3466], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:2317:3466]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2332:3466]. Ignored this error. 2026-01-10T12:29:27.923923Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:2326:3466], SessionActorId: [3:2317:3466], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 . sessionActorId=[3:2317:3466]. 2026-01-10T12:29:27.924341Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, ActorId: [3:2317:3466], ActorState: ExecuteState, LegacyTraceId: 01keky42x634yfpd2dpawhjrwv, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [3:2327:3466] from: [3:2326:3466] trace_id# 2026-01-10T12:29:27.924535Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1066} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got EvAbortExecution Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-10T12:29:27.924631Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-10T12:29:27.924703Z node 3 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1205} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-10T12:29:27.924850Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1344} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: UNAVAILABLE Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } Result { Stats { } } TargetActor# [3:2317:3466] trace_id# 2026-01-10T12:29:27.924920Z node 3 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2817} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shutdown immediately - nothing to wait trace_id# 2026-01-10T12:29:27.925016Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:29:27.925070Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [3:2327:3466] TxId: 281474976710683. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-10T12:29:27.925318Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, ActorId: [3:2317:3466], ActorState: ExecuteState, LegacyTraceId: 01keky42x634yfpd2dpawhjrwv, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } trace_id# 2026-01-10T12:29:27.925534Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-10T12:29:27.926004Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:236} ActorId: [3:2335:3466] TxId: 281474976710684. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [3:2326:3466] trace_id# 2026-01-10T12:29:27.926233Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1487} ActorId: [3:2335:3466] TxId: 281474976710684. 
Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-10T12:29:27.926281Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1502} ActorId: [3:2335:3466] TxId: 281474976710684. Ctx: { TraceId: 01keky42x634yfpd2dpawhjrwv, Database: , SessionId: ydb://session/3?node_id=3&id=NWVjNGJmZS04YWU2ZjgzLTdkM2FmNjRjLTljM2Y5YzU5, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-10T12:29:25.472198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:25.505995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:25.506231Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:25.513756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:25.514017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:25.514253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:25.514384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:25.514499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:25.514609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:25.514726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:25.514858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:25.514992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:25.515121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.515220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:25.515353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:25.515500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:25.547047Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:25.547450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:25.547519Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:25.547713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:25.547879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:25.547952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:25.548018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:25.548140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:25.548208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:25.548257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:25.548288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:25.548480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:25.548554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:25.548597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:25.548670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:25.548774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:25.548836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:25.548883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:25.548936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:25.548981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:25.549099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:25.549140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:25.549208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:25.549262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:25.549302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:25.549540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:25.549617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:25.549657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:25.549851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:25.549899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.549931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:25.549999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:25.550060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:25.550097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:25.550142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:25.550179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:25.550214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:25.550361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:25.550433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... 
2,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:28.615659Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-10T12:29:28.615709Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:28.615750Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:28.616248Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:28.616425Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.616473Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:28.616602Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-10T12:29:28.616648Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-10T12:29:28.616836Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-10T12:29:28.616963Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.617058Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.617160Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.617350Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:28.617442Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.617529Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.617671Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-10T12:29:28.617989Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":3633397,"name":"_full_task","f":3633397,"d_finished":0,"c":0,"l":3642858,"d":9461},"events":[{"name":"bootstrap","f":3633663,"d_finished":1054,"c":1,"l":3634717,"d":1054},{"a":3642484,"name":"ack","f":3641375,"d_finished":950,"c":1,"l":3642325,"d":1324},{"a":3642475,"name":"processing","f":3634871,"d_finished":2696,"c":3,"l":3642328,"d":3079},{"name":"ProduceResults","f":3634366,"d_finished":1716,"c":6,"l":3642691,"d":1716},{"a":3642694,"name":"Finish","f":3642694,"d_finished":0,"c":0,"l":3642858,"d":164},{"name":"task_result","f":3634885,"d_finished":1692,"c":2,"l":3640920,"d":1692}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.618036Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:28.618308Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":3633397,"name":"_full_task","f":3633397,"d_finished":0,"c":0,"l":3643219,"d":9822},"events":[{"name":"bootstrap","f":3633663,"d_finished":1054,"c":1,"l":3634717,"d":1054},{"a":3642484,"name":"ack","f":3641375,"d_finished":950,"c":1,"l":3642325,"d":1685},{"a":3642475,"name":"processing","f":3634871,"d_finished":2696,"c":3,"l":3642328,"d":3440},{"name":"ProduceResults","f":3634366,"d_finished":1716,"c":6,"l":3642691,"d":1716},{"a":3642694,"name":"Finish","f":3642694,"d_finished":0,"c":0,"l":3643219,"d":525},{"name":"task_result","f":3634885,"d_finished":1692,"c":2,"l":3640920,"d":1692}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:28.618369Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:28.606573Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-10T12:29:28.618396Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:28.618501Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] |95.9%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2026-01-10T12:28:57.054768Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708928957327919:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:57.054827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:57.363413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:57.363531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:57.385450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:57.388909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:57.422066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708928957327873:2081] 1768048137047430 != 1768048137047433 2026-01-10T12:28:57.430274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:57.550771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:57.690400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:57.690447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:57.690455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:57.690545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.064359Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:58.071735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.082501Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:28:58.082576Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5b963f07d0] Connect to grpc://localhost:10888 2026-01-10T12:28:58.087170Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: 
"ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:28:58.106229Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b963f07d0] Status 14 Service Unavailable 2026-01-10T12:28:58.106454Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:28:58.106512Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:28:58.106607Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:28:58.106925Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:28:58.109196Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b963f07d0] Status 14 Service Unavailable 2026-01-10T12:28:58.109342Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:28:58.109381Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:28:59.077467Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:28:59.077621Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:28:59.077981Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:28:59.081214Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b963f07d0] Status 14 Service Unavailable 2026-01-10T12:28:59.081386Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:28:59.081438Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:01.078417Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-10T12:29:01.078536Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:29:01.078896Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" 
type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:29:01.081948Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5b963f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:29:01.082162Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-10T12:29:02.054830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708928957327919:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:02.054922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:10.733193Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708982465909051:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:10.733274Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:10.754498Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:10.806613Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:10.810415Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708982465909024:2081] 1768048150732261 != 1768048150732264 2026-01-10T12:29:10.853818Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:10.853903Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:10.857514Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:10.897539Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:10.897564Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:10.897572Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:10.897695Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:11.027487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:11.092988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:11.099430Z node 2 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:29:11.099494Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5b964b6050] Connect to grpc://localhost:19027 2026-01-10T12:29:11.100457Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b964b6050] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:29:11.108515Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b964b6050] Status 14 Service Unavailable 2026-01-10T12:29:11.108650Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:29:11.108698Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:11.108765Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:29:11.109017Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b964b6050] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } a ... cket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-10T12:29:18.313298Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:18.313941Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-10T12:29:18.314180Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963fafd0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:18.315928Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5b963fafd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-10T12:29:18.316051Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-10T12:29:18.316160Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2026-01-10T12:29:21.938544Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593709033340008001:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:21.938599Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:21.954120Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:22.055743Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:22.055852Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:22.057042Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593709033340007974:2081] 1768048161938040 != 1768048161938043 2026-01-10T12:29:22.066281Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:22.068743Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:22.100250Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:29:22.100378Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:29:22.114795Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:22.114819Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:22.114826Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:22.114888Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:22.149476Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:22.365015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:22.371894Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-10T12:29:22.371970Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5b963f70d0] Connect to grpc://localhost:6495 2026-01-10T12:29:22.373130Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f70d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:22.381138Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b963f70d0] Status 14 Service Unavailable 2026-01-10T12:29:22.381306Z node 5 
:TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:29:22.381331Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-10T12:29:22.381361Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:22.381467Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-10T12:29:22.381810Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b963f70d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:22.384663Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d5b963f70d0] Status 1 CANCELLED 2026-01-10T12:29:22.384806Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-10T12:29:22.384824Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-01-10T12:29:22.384860Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-10T12:29:26.016667Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7593709054396419349:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:26.016764Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:26.032008Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:26.109311Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:26.110927Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7593709054396419325:2081] 1768048166016278 != 1768048166016281 2026-01-10T12:29:26.150655Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:26.150747Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:26.153817Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:26.185969Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:26.185991Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-01-10T12:29:26.185997Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:26.186072Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:26.329211Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:26.435994Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:26.443279Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-10T12:29:26.443357Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5b964597d0] Connect to grpc://localhost:31340 2026-01-10T12:29:26.444442Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b964597d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-10T12:29:26.453038Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5b964597d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:29:26.453312Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:26.454083Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-10T12:29:26.454370Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5b964597d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-10T12:29:26.456000Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5b964597d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-10T12:29:26.456295Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |95.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TColumnShardTestReadWrite::WriteReadZSTD >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> Normalizers::InsertedPortionsCleanerNormalizer >> Normalizers::ChunksV0MetaNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2026-01-10T12:28:57.053364Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708928753665397:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:57.053411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:57.310200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:57.382343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:57.382449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:57.456238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708928753665360:2081] 1768048137045866 != 1768048137045869 2026-01-10T12:28:57.472329Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:57.477900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:57.524254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:57.690470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:57.690570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:57.690581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:57.690659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.063862Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:58.071891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.134491Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 2026-01-10T12:28:58.143908Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: 
CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:28:58.143945Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:28:58.144630Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****mlaA (A9B17452) () has now retryable error message 'Security state is empty' 2026-01-10T12:28:58.144896Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:28:58.144932Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:28:58.145152Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****mlaA (A9B17452) () has now retryable error message 'Security state is empty' 2026-01-10T12:28:58.145170Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-10T12:28:58.145181Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-10T12:28:58.145201Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket eyJh****mlaA (A9B17452): Security state is empty 2026-01-10T12:29:00.066134Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****mlaA (A9B17452) 2026-01-10T12:29:00.066447Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:29:00.066466Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:00.066742Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****mlaA (A9B17452) () has now retryable error message 'Security state is empty' 2026-01-10T12:29:00.066758Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-10T12:29:01.145653Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:02.053605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708928753665397:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:02.053735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:03.067865Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****mlaA (A9B17452) 2026-01-10T12:29:03.068125Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:29:03.068146Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:03.069225Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****mlaA (A9B17452) () has now valid token of user1 2026-01-10T12:29:03.069254Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-10T12:29:07.072717Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****mlaA (A9B17452) 2026-01-10T12:29:07.073136Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 
eyJh****mlaA (A9B17452) () has now valid token of user1 2026-01-10T12:29:08.800572Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708976643880937:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:08.800619Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:08.815154Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:08.912655Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:08.927484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:08.927569Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:08.934718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:08.992592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:08.992620Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:08.992627Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:08.992703Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:08.995720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:09.208723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:09.215009Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:09.215106Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0db8597650] Connect to grpc://localhost:28954 2026-01-10T12:29:09.218491Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8597650] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-10T12:29:09.228725Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0db8597650] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-10T12:29:09.229058Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:12.022269Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708994240789699:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:12.022322Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:12.030046Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:12.124870Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:12.126632Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708994240789672:2081] 1768048152021486 != 1768048152021489 2026-01-10T12:29:12.137464Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:12.171538Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:12.171672Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:12.177600Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:12.218759Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:12.218786Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:12.218792Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: fai ... 
Service] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:12.400711Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:12.407621Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:12.407696Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0db8511f50] Connect to grpc://localhost:28351 2026-01-10T12:29:12.408543Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8511f50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-10T12:29:12.415839Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0db8511f50] Status 14 Service Unavailable 2026-01-10T12:29:12.416000Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:12.416065Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:12.416224Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8511f50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-10T12:29:12.419143Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0db8511f50] Status 14 Service Unavailable 2026-01-10T12:29:12.419383Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:13.049852Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-10T12:29:13.049892Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:13.050123Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:13.050214Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8511f50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-10T12:29:13.051876Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0db8511f50] Status 14 Service Unavailable 2026-01-10T12:29:13.051976Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:14.050316Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-10T12:29:14.050365Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:14.050794Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8511f50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-10T12:29:14.052633Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0db8511f50] Status 14 Service Unavailable 2026-01-10T12:29:14.052744Z node 3 :TICKET_PARSER 
DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:16.051173Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-10T12:29:16.051198Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:16.051316Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db8511f50] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-10T12:29:16.053480Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0db8511f50] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-10T12:29:16.053609Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:17.022518Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7593708994240789699:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:17.022695Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:25.044777Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593709048114403407:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:25.044835Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:25.057432Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:25.133852Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593709048114403379:2081] 1768048165043959 != 1768048165043962 2026-01-10T12:29:25.142180Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:25.178056Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:25.178147Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:25.179512Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:25.219351Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:25.219367Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:25.219372Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:25.219422Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:25.312818Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:25.397654Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:25.427878Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:25.427939Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0db854f350] Connect to grpc://localhost:65500 2026-01-10T12:29:25.428811Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db854f350] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-10T12:29:25.435838Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0db854f350] Status 14 Service Unavailable 2026-01-10T12:29:25.435965Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-10T12:29:25.435990Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-10T12:29:25.436127Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0db854f350] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-10T12:29:25.437700Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0db854f350] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-10T12:29:25.437915Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-10T12:29:26.051032Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:28.044462Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593709061636858165:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:28.044583Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:28.065073Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:28.154348Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:28.167623Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:28.167706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:28.188811Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-10T12:29:28.252736Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:28.252770Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:28.252793Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:28.252880Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:28.303625Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:28.548966Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:28.555347Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:979: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2026-01-10T12:29:27.279598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:27.311558Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:27.311823Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:27.319878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:27.320173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:27.320424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:27.320547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:27.320654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:27.320791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:27.320910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:27.321033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:27.321157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:27.321271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:27.321404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:27.321550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:27.321669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:27.352900Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:27.353282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:27.353341Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:27.353529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:27.353694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:27.353770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:27.353816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:27.353962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:27.354036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:27.354084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:27.354123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:27.354311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:27.354383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:27.354434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:27.354473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:27.354600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:27.354660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:27.354707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:27.354743Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:27.354798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:27.354841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:27.354875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:27.354919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:27.354957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:27.354986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:27.355239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:27.355412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:27.355454Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:27.355592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:27.355634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:27.355667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:27.355761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:27.355816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:27.355848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:27.355895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:27.355935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:27.355968Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:27.356154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:27.356224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:31.022534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:31.022874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:31.023080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.023245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.023419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.023655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:31.023842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.024031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.024429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-10T12:29:31.024970Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":4189878,"name":"_full_task","f":4189878,"d_finished":0,"c":0,"l":4205836,"d":15958},"events":[{"name":"bootstrap","f":4190162,"d_finished":1811,"c":1,"l":4191973,"d":1811},{"a":4204970,"name":"ack","f":4203208,"d_finished":1574,"c":1,"l":4204782,"d":2440},{"a":4204951,"name":"processing","f":4192143,"d_finished":4565,"c":3,"l":4204785,"d":5450},{"name":"ProduceResults","f":4191461,"d_finished":2708,"c":6,"l":4205398,"d":2708},{"a":4205402,"name":"Finish","f":4205402,"d_finished":0,"c":0,"l":4205836,"d":434},{"name":"task_result","f":4192166,"d_finished":2901,"c":2,"l":4202966,"d":2901}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.025057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:31.025553Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":4189878,"name":"_full_task","f":4189878,"d_finished":0,"c":0,"l":4206440,"d":16562},"events":[{"name":"bootstrap","f":4190162,"d_finished":1811,"c":1,"l":4191973,"d":1811},{"a":4204970,"name":"ack","f":4203208,"d_finished":1574,"c":1,"l":4204782,"d":3044},{"a":4204951,"name":"processing","f":4192143,"d_finished":4565,"c":3,"l":4204785,"d":6054},{"name":"ProduceResults","f":4191461,"d_finished":2708,"c":6,"l":4205398,"d":2708},{"a":4205402,"name":"Finish","f":4205402,"d_finished":0,"c":0,"l":4206440,"d":1038},{"name":"task_result","f":4192166,"d_finished":2901,"c":2,"l":4202966,"d":2901}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:31.025669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:31.004683Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-10T12:29:31.025719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:31.025987Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 
2026-01-10T12:28:57.053718Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708927604138124:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:28:57.053789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:28:57.311680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:28:57.361908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:57.362040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:57.449686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:57.452051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:57.596841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:28:57.690402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:57.690506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:57.690515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:57.690609Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:58.063602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:28:58.071635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:58.119034Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:28:58.136873Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:28:58.136932Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:28:58.138190Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****1b9Q (E02D7E94) () has now valid token of user1 2026-01-10T12:28:58.138201Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-10T12:29:00.566138Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708939875622833:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:00.566219Z node 2 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:00.575921Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:00.685174Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:00.688631Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:00.690567Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593708939875622806:2081] 1768048140564982 != 1768048140564985 2026-01-10T12:29:00.725544Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:00.725645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:00.738429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:00.809925Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:00.809953Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:00.809979Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:00.810055Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:00.854997Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:01.015117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:01.143602Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:01.151741Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:29:01.151776Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:01.152597Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zC_A (F5F457A7) () has now valid token of user1 2026-01-10T12:29:01.152618Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-10T12:29:04.252209Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7593708960487827496:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:04.252294Z 
node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:04.264673Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:04.356430Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:04.368323Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7593708960487827469:2081] 1768048144251256 != 1768048144251259 2026-01-10T12:29:04.393182Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:04.393263Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:04.401072Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:04.430855Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:04.430880Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:04.430887Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:04.430979Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:04.548955Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:04.611389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:04.787437Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-10T12:29:04.787471Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-10T12:29:04.787530Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****2FAg (9B11A443)) 2026-01-10T12:29:05.282660Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:05.788427Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db1 keys 1 2026-01-10T12:29:05.788455Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db1 2026-01-10T12:29:05.788742Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-10T12:29:05.788769Z node 3 :TICKET_PARSER 
TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-10T12:29:05.789681Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****2FAg (9B11A443) () has now valid token of user1 2026-01-10T12:29:05.789699Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db1, A4 success 2026-01-10T12:29:05.827756Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-10T12:29:05.827799Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-10T12:29:05.827846Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db2, login state is not available yet, deffer token (eyJh****OEEg (EBD69105)) 2026-01-10T12:29:06.827700Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db2 keys 1 2026-01-10T12:29:06.827746Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db2 2026-01-10T12:29:06.827980Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-10T12:29:06.828002Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-10T12:29:06.828687Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****OEEg (EBD69105) () has now valid token of user1 2026-01-10T12:29:06.828717Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db2, A4 success 2026-01-10T12:29:07.674376Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7593708969733883862:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:07.674453Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:07.687655Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:07.778626Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:07.790708Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593708969733883835:2081] 1768048147673290 != 1768048147673293 2026-01-10T12:29:07.797372Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:07.797485Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:07.822478Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:07.905029Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:07.905057Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:07.905067Z node 4 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:07.905169Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:07.944536Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:08.105890Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:08.210635Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:08.220003Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-10T12:29:08.220058Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-10T12:29:08.220958Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zM1A (E95001D6) () has now valid token of user1 2026-01-10T12:29:08.220993Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-10T12:29:08.221362Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:08.678759Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:12.674997Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593708969733883862:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:12.675137Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:13.677962Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****zM1A (E95001D6) 2026-01-10T12:29:13.678519Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zM1A (E95001D6) () has now valid token of user1 2026-01-10T12:29:16.679738Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****zM1A (E95001D6) 2026-01-10T12:29:16.680123Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zM1A (E95001D6) () has now valid token of user1 2026-01-10T12:29:18.221831Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:21.682455Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****zM1A (E95001D6) 2026-01-10T12:29:21.682764Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zM1A (E95001D6) () has now valid token of user1 2026-01-10T12:29:22.763896Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:29:22.763933Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:26.685003Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****zM1A (E95001D6) 
2026-01-10T12:29:26.685397Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****zM1A (E95001D6) () has now valid token of user1 2026-01-10T12:29:28.774722Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7593709060267773445:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:28.774861Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:28.789483Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:28.875515Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:28.877308Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7593709060267773417:2081] 1768048168773762 != 1768048168773765 2026-01-10T12:29:28.903760Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:28.903874Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:28.920873Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:28.988871Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:28.988897Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:28.988906Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:28.989026Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:29.059002Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:29.228069Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:29.277695Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-10T12:29:29.292915Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:987: Ticket **** (00000000): Ticket is empty |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |95.9%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::WriteStandalone >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] >> Normalizers::InsertedPortionsCleanerNormalizer [GOOD] >> Normalizers::EmptyTablesNormalizer >> KqpBatchUpdate::ManyPartitions_1 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::ReadWithProgram >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2026-01-10T12:29:23.633155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:23.660393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:23.660582Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:23.666465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:23.666673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:23.666844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:23.666930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:23.666991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:23.667054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:23.667145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:23.667219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:23.667286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:23.667352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.667455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:23.667534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:23.667636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:23.688152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:23.688302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:23.688356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:23.688506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.688633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:23.688706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:23.688742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:23.688822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:23.688879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:23.688916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:23.688938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:23.689071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.689122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:23.689173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:23.689210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:23.689281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:23.689322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:23.689355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:23.689382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:23.689423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:23.689463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:23.689498Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:23.689531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:23.689557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:23.689577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:23.689728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:23.689812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:23.689865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:23.690006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:23.690045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.690074Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.690114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:23.690146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:23.690172Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:23.690202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:23.690226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:23.690254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:23.690349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:23.690412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:32.868301Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-10T12:29:32.868325Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:32.868346Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:32.868653Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:32.868754Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.868795Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:32.868879Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-10T12:29:32.868912Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-10T12:29:32.869019Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:318:2319];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-10T12:29:32.869099Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.869178Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.869304Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.869388Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:32.869448Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.869512Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.869810Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:320:2320] finished for tablet 9437184 2026-01-10T12:29:32.870240Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:318:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.064},{"events":["l_task_result"],"t":0.655},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.656}],"full":{"a":9020470,"name":"_full_task","f":9020470,"d_finished":0,"c":0,"l":9677087,"d":656617},"events":[{"name":"bootstrap","f":9020788,"d_finished":1744,"c":1,"l":9022532,"d":1744},{"a":9676574,"name":"ack","f":9084633,"d_finished":257430,"c":421,"l":9676523,"d":257943},{"a":9676567,"name":"processing","f":9022814,"d_finished":546598,"c":843,"l":9676525,"d":547118},{"name":"ProduceResults","f":9021955,"d_finished":439812,"c":1266,"l":9676726,"d":439812},{"a":9676731,"name":"Finish","f":9676731,"d_finished":0,"c":0,"l":9677087,"d":356},{"name":"task_result","f":9022836,"d_finished":280437,"c":422,"l":9675556,"d":280437}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.870328Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:318:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:32.870681Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:318:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.064},{"events":["l_task_result"],"t":0.655},{"events":["l_ProduceResults","f_Finish"],"t":0.656},{"events":["l_ack","l_processing","l_Finish"],"t":0.657}],"full":{"a":9020470,"name":"_full_task","f":9020470,"d_finished":0,"c":0,"l":9677562,"d":657092},"events":[{"name":"bootstrap","f":9020788,"d_finished":1744,"c":1,"l":9022532,"d":1744},{"a":9676574,"name":"ack","f":9084633,"d_finished":257430,"c":421,"l":9676523,"d":258418},{"a":9676567,"name":"processing","f":9022814,"d_finished":546598,"c":843,"l":9676525,"d":547593},{"name":"ProduceResults","f":9021955,"d_finished":439812,"c":1266,"l":9676726,"d":439812},{"a":9676731,"name":"Finish","f":9676731,"d_finished":0,"c":0,"l":9677562,"d":831},{"name":"task_result","f":9022836,"d_finished":280437,"c":422,"l":9675556,"d":280437}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:32.870752Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:32.211451Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-10T12:29:32.870793Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:32.870930Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::ReadWithProgramNoProjection >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 14272, MsgBus: 9343 2026-01-10T12:25:09.840136Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707948817058194:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.840194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.077483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.196707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.196819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.198916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.245073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.312203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.431229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.431274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.431283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.431365Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.854022Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.061305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.071268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:25:11.118727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.267498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.438433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.517036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.237024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965996929214:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.237127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.239863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965996929223:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.239941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.732421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.767430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.800365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.838931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.884888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.966148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.045065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.125131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970291897398:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.125222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.125344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970291897403:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.125523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707970291897404:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.125587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.128710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.137711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707970291897407:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:25:14.201355Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707970291897458:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.848181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707948817058194:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.849863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:25:15.987012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 32534, MsgBus: 8552 2026-01-10T12:29:19.854214Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7593709024751581031:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:19.854282Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:19.973640Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:20.048369Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:20.100036Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:20.100215Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:20.106040Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:20.127894Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:20.175688Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:20.175729Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:20.175747Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:20.175884Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:20.864423Z node 20 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:21.109707Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:21.129366Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:21.227310Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:21.556568Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:21.682183Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:24.854286Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7593709024751581031:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:24.854403Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:26.693818Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593709054816353957:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:26.693981Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:26.694300Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593709054816353966:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:26.694366Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:26.832759Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:26.887011Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:26.939645Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:26.995689Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.052939Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.116319Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.174048Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.252798Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.381456Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593709059111322150:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:27.381499Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593709059111322155:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:27.381605Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:27.381935Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7593709059111322158:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:27.382025Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:27.387779Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:27.402297Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7593709059111322157:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:27.460939Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7593709059111322210:3788] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:30.413669Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2026-01-10T12:29:30.928468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:30.953080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:30.953279Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:30.960311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:30.960570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:30.960808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:30.960935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:30.961048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:30.961177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:30.961314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:30.961455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:30.961587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:30.961686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:30.961831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:30.961977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:30.962095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:30.990782Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:30.991214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:30.991295Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:30.991514Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:30.991699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:30.991778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:30.991827Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:30.991953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:30.992032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:30.992121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:30.992167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:30.992385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:30.992489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:30.992540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:30.992586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:30.992715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:30.992785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:30.992832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:30.992869Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:30.992924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:30.992968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:30.993003Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:30.993052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:30.993092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:30.993129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:30.993400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:30.993557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:30.993598Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:30.993771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:30.993835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:30.993895Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:30.993982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:30.994042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:30.994081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:30.994146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:30.994197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:30.994249Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:30.994401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:30.994464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
urce_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:34.901185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:34.901482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-10T12:29:34.901693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.901886Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.902081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.902300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:34.902483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.902662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.903043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-10T12:29:34.903612Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":4419371,"name":"_full_task","f":4419371,"d_finished":0,"c":0,"l":4434776,"d":15405},"events":[{"name":"bootstrap","f":4419623,"d_finished":1769,"c":1,"l":4421392,"d":1769},{"a":4433947,"name":"ack","f":4432213,"d_finished":1570,"c":1,"l":4433783,"d":2399},{"a":4433933,"name":"processing","f":4421568,"d_finished":4481,"c":3,"l":4433786,"d":5324},{"name":"ProduceResults","f":4420888,"d_finished":2770,"c":6,"l":4434363,"d":2770},{"a":4434368,"name":"Finish","f":4434368,"d_finished":0,"c":0,"l":4434776,"d":408},{"name":"task_result","f":4421589,"d_finished":2843,"c":2,"l":4431990,"d":2843}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.903703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:34.904269Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":4419371,"name":"_full_task","f":4419371,"d_finished":0,"c":0,"l":4435424,"d":16053},"events":[{"name":"bootstrap","f":4419623,"d_finished":1769,"c":1,"l":4421392,"d":1769},{"a":4433947,"name":"ack","f":4432213,"d_finished":1570,"c":1,"l":4433783,"d":3047},{"a":4433933,"name":"processing","f":4421568,"d_finished":4481,"c":3,"l":4433786,"d":5972},{"name":"ProduceResults","f":4420888,"d_finished":2770,"c":6,"l":4434363,"d":2770},{"a":4434368,"name":"Finish","f":4434368,"d_finished":0,"c":0,"l":4435424,"d":1056},{"name":"task_result","f":4421589,"d_finished":2843,"c":2,"l":4431990,"d":2843}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:34.904350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:34.883823Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-10T12:29:34.904396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:34.904660Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression >> 
TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> Normalizers::EmptyTablesNormalizer [GOOD] >> EvWrite::WriteWithLock [GOOD] >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2026-01-10T12:29:36.341774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:36.378720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:36.378991Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:36.387374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:36.387659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:36.387921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:36.388052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:36.388229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:36.388354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:36.388473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:36.388616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:36.388745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:36.388876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.389029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:36.389162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:36.389322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:36.423611Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:36.424030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:36.424118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:36.424320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.424532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:36.424623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:36.424674Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:36.424815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:36.424904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:36.424956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:36.424996Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:36.425195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.425273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:36.425325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:29:36.425375Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:36.425490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:36.425556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:36.425607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:36.425644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:36.425731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:36.425785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:36.425828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:36.425895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:36.425972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:36.426008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:36.426257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:36.426389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:36.426430Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:36.426645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:36.426701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.426737Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.426824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:36.426884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:36.426919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:36.426973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:36.427017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:36.427065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:36.427231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:36.427300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;source_idx=0;tablet_id=9437184;fline=source.cpp:346;source_idx=0; 2026-01-10T12:29:37.276507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=source.cpp:346;source_idx=0; 2026-01-10T12:29:37.276638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-10T12:29:37.276690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-10T12:29:37.276774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-10T12:29:37.276911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2026-01-10T12:29:37.276963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-10T12:29:37.276994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-10T12:29:37.277115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-10T12:29:37.277209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Calculation; 2026-01-10T12:29:37.278212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Filter; 2026-01-10T12:29:37.278291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=4; 2026-01-10T12:29:37.278402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:25;event=empty_result;scan_step_idx=5; 2026-01-10T12:29:37.278513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-10T12:29:37.278547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-10T12:29:37.278593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2026-01-10T12:29:37.278630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2026-01-10T12:29:37.278757Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.278885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.279025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:37.279175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.279279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.279523Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: actor.cpp:457: Scan [1:352:2364] finished for tablet 9437184 2026-01-10T12:29:37.279905Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:351:2363];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.005}],"full":{"a":1469213,"name":"_full_task","f":1469213,"d_finished":0,"c":0,"l":1474990,"d":5777},"events":[{"name":"bootstrap","f":1469433,"d_finished":979,"c":1,"l":1470412,"d":979},{"a":1474424,"name":"ack","f":1474424,"d_finished":0,"c":0,"l":1474990,"d":566},{"a":1474414,"name":"processing","f":1470523,"d_finished":1527,"c":2,"l":1474322,"d":2103},{"name":"ProduceResults","f":1470045,"d_finished":902,"c":4,"l":1474707,"d":902},{"a":1474710,"name":"Finish","f":1474710,"d_finished":0,"c":0,"l":1474990,"d":280},{"name":"task_result","f":1470536,"d_finished":1499,"c":2,"l":1474319,"d":1499}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.279977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:351:2363];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:37.280343Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:351:2363];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":1469213,"name":"_full_task","f":1469213,"d_finished":0,"c":0,"l":1475422,"d":6209},"events":[{"name":"bootstrap","f":1469433,"d_finished":979,"c":1,"l":1470412,"d":979},{"a":1474424,"name":"ack","f":1474424,"d_finished":0,"c":0,"l":1475422,"d":998},{"a":1474414,"name":"processing","f":1470523,"d_finished":1527,"c":2,"l":1474322,"d":2535},{"name":"ProduceResults","f":1470045,"d_finished":902,"c":4,"l":1474707,"d":902},{"a":1474710,"name":"Finish","f":1474710,"d_finished":0,"c":0,"l":1475422,"d":712},{"name":"task_result","f":1470536,"d_finished":1499,"c":2,"l":1474319,"d":1499}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.280438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:37.272097Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:29:37.280480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:37.280619Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2026-01-10T12:29:32.181371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:32.215296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:32.215568Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:32.223226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:32.223503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:32.223766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:32.223904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:32.224048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:32.224227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:32.224355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:32.224486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:32.224632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:32.224738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:32.224905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:32.225033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:32.225142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:32.256810Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:32.257163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:32.257244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:32.257416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:32.257584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:32.257657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:32.257699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:32.257826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:32.257919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:32.257964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:32.257996Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:32.258200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:32.258272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:32.258317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:32.258376Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:32.258490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:32.258547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:32.258606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-10T12:29:32.258642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:32.258691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:32.258732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:32.258766Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:32.258812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:32.258852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:32.258879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:32.259106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:32.259231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:32.259271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:32.259423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:32.259479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:32.259533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:32.259592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:32.259646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:32.259675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-10T12:29:32.259720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:32.259760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:32.259791Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:32.259941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:32.260004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:37.238236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:37.238474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:37.238610Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.238729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.238850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.239012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:37.239153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.239287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.239585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-10T12:29:37.239949Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":5539279,"name":"_full_task","f":5539279,"d_finished":0,"c":0,"l":5550036,"d":10757},"events":[{"name":"bootstrap","f":5539486,"d_finished":1293,"c":1,"l":5540779,"d":1293},{"a":5549398,"name":"ack","f":5548143,"d_finished":1130,"c":1,"l":5549273,"d":1768},{"a":5549389,"name":"processing","f":5540891,"d_finished":3133,"c":3,"l":5549275,"d":3780},{"name":"ProduceResults","f":5540384,"d_finished":2000,"c":6,"l":5549719,"d":2000},{"a":5549724,"name":"Finish","f":5549724,"d_finished":0,"c":0,"l":5550036,"d":312},{"name":"task_result","f":5540906,"d_finished":1960,"c":2,"l":5547967,"d":1960}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.240008Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:37.240401Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":5539279,"name":"_full_task","f":5539279,"d_finished":0,"c":0,"l":5550471,"d":11192},"events":[{"name":"bootstrap","f":5539486,"d_finished":1293,"c":1,"l":5540779,"d":1293},{"a":5549398,"name":"ack","f":5548143,"d_finished":1130,"c":1,"l":5549273,"d":2203},{"a":5549389,"name":"processing","f":5540891,"d_finished":3133,"c":3,"l":5549275,"d":4215},{"name":"ProduceResults","f":5540384,"d_finished":2000,"c":6,"l":5549719,"d":2000},{"a":5549724,"name":"Finish","f":5549724,"d_finished":0,"c":0,"l":5550471,"d":747},{"name":"task_result","f":5540906,"d_finished":1960,"c":2,"l":5547967,"d":1960}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.240477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:37.226346Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2026-01-10T12:29:37.240508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:37.240688Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test 
command err: 2026-01-10T12:29:35.440918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:35.474301Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:35.474524Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:35.481619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:35.481807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:35.482035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:35.482149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:35.482213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:35.482274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:35.482376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:35.482444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:35.482516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:35.482593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.482706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:35.482775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:35.482862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:35.505811Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:35.506030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:35.506097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:35.506280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:35.506504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:35.506602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:35.506652Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:35.506740Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:35.506802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:35.506852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:35.506884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:35.507071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:35.507148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:35.507202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:35.507244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:35.507310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:35.507350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:35.507405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:35.507441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:35.507494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:35.507548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:35.507579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:35.507607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:35.507634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:35.507661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:35.507883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:35.507985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:35.508027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:35.508181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:35.508232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.508273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.508336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:35.508385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:35.508426Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:35.508459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:35.508496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:35.508562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:35.508701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:35.508759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lts:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-10T12:29:37.862979Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=54;finished=1; 2026-01-10T12:29:37.863010Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:29:37.863043Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:29:37.863248Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:37.863393Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:54;schema=key: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.863441Z node 2 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:37.863557Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=54; 2026-01-10T12:29:37.863612Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=54;batch_columns=key,field; 2026-01-10T12:29:37.863752Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2026-01-10T12:29:37.863870Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.863979Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.864189Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.864355Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:37.864482Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.864589Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.864918Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:207:2219] finished for tablet 9437184 2026-01-10T12:29:37.865470Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:206:2218];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.033},{"events":["l_task_result"],"t":0.184},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.185}],"full":{"a":2641537,"name":"_full_task","f":2641537,"d_finished":0,"c":0,"l":2827519,"d":185982},"events":[{"name":"bootstrap","f":2641764,"d_finished":1369,"c":1,"l":2643133,"d":1369},{"a":2826867,"name":"ack","f":2674641,"d_finished":66049,"c":86,"l":2826775,"d":66701},{"a":2826855,"name":"processing","f":2643283,"d_finished":141936,"c":173,"l":2826778,"d":142600},{"name":"ProduceResults","f":2642633,"d_finished":112279,"c":261,"l":2827141,"d":112279},{"a":2827147,"name":"Finish","f":2827147,"d_finished":0,"c":0,"l":2827519,"d":372},{"name":"task_result","f":2643306,"d_finished":73446,"c":87,"l":2825592,"d":73446}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.865560Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:37.866108Z node 2 
:TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:206:2218];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.033},{"events":["l_task_result"],"t":0.184},{"events":["l_ProduceResults","f_Finish"],"t":0.185},{"events":["l_ack","l_processing","l_Finish"],"t":0.186}],"full":{"a":2641537,"name":"_full_task","f":2641537,"d_finished":0,"c":0,"l":2828135,"d":186598},"events":[{"name":"bootstrap","f":2641764,"d_finished":1369,"c":1,"l":2643133,"d":1369},{"a":2826867,"name":"ack","f":2674641,"d_finished":66049,"c":86,"l":2826775,"d":67317},{"a":2826855,"name":"processing","f":2643283,"d_finished":141936,"c":173,"l":2826778,"d":143216},{"name":"ProduceResults","f":2642633,"d_finished":112279,"c":261,"l":2827141,"d":112279},{"a":2827147,"name":"Finish","f":2827147,"d_finished":0,"c":0,"l":2828135,"d":988},{"name":"task_result","f":2643306,"d_finished":73446,"c":87,"l":2825592,"d":73446}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:37.866232Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:37.677447Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2026-01-10T12:29:37.866280Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:37.866446Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2026-01-10T12:29:33.309425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:33.343389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:33.343605Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:33.350958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertedPortions; 2026-01-10T12:29:33.351200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:33.351430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:33.351611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:33.351715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:33.351827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:33.351953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:33.352112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:33.352223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:33.352319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:33.352482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:33.352597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:33.352726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:33.352841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:33.385059Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:33.385271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanInsertedPortions; 2026-01-10T12:29:33.385340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-10T12:29:33.385634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2026-01-10T12:29:33.385732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2026-01-10T12:29:33.385841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2026-01-10T12:29:33.385940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2026-01-10T12:29:33.386115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertedPortions;id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:33.386192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-10T12:29:33.386236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-10T12:29:33.386419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:33.386552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:33.386622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:33.386663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-10T12:29:33.386761Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:33.386819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:33.386872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:33.386918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:33.387092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:33.387147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:33.387197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:33.387246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:33.387347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:33.387413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:33.387452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:33.387478Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:33.387526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:33.387562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:33.387589Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:33.387692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:33.387805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:33.387849Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:33.388092Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:33.388151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:33.388197Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:33.388329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:33.388374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:33.388403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cp ... e=common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=22; 2026-01-10T12:29:37.758324Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=77; 2026-01-10T12:29:37.758443Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=55; 2026-01-10T12:29:37.758524Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=35; 2026-01-10T12:29:37.758594Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=27; 2026-01-10T12:29:37.758642Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-10T12:29:37.758680Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=5; 2026-01-10T12:29:37.758709Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=504; 2026-01-10T12:29:37.758803Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=46; 2026-01-10T12:29:37.758888Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=50; 2026-01-10T12:29:37.758996Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=67; 2026-01-10T12:29:37.759086Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=58; 2026-01-10T12:29:37.759251Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=135; 2026-01-10T12:29:37.767999Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8672; 2026-01-10T12:29:37.768123Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=41; 2026-01-10T12:29:37.768171Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-10T12:29:37.768200Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:29:37.768282Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2026-01-10T12:29:37.768312Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:29:37.768382Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2026-01-10T12:29:37.768412Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:29:37.768473Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-10T12:29:37.768610Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=66; 2026-01-10T12:29:37.768713Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=54; 2026-01-10T12:29:37.768771Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18325; 2026-01-10T12:29:37.769061Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:37.769161Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:37.769220Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:37.769287Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:37.769341Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:37.769388Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:37.769460Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:29:37.769521Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:37.769580Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:37.769624Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:37.769705Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:37.788705Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.029000s; 2026-01-10T12:29:37.789315Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:37.789432Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:37.789513Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:37.789623Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:29:37.789715Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:37.789797Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:37.789882Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:37.790027Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:37.790240Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.043000s; 2026-01-10T12:29:37.790294Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-10T12:29:37.866484Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 111 scanId: 0 version: {1768048176835:111} readable: {1768048176835:max} at tablet 9437184 2026-01-10T12:29:37.866675Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2026-01-10T12:29:37.866745Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1768048176835:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:29:37.866862Z node 2 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1768048176835:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2026-01-10T12:29:33.039032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:33.068654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:33.068867Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:33.075814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:33.076037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:33.076239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:33.076315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:33.076397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:33.076498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:33.076584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:33.076661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:33.076751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:33.076836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:33.076944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:33.077033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:33.077107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:33.100064Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:33.100502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:33.100574Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:33.100760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:33.100927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:33.101009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:33.101051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:33.101146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:33.101208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:33.101242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:33.101266Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:33.101431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:33.101490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:33.101532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:33.101579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:33.101658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:33.101702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:33.101734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:33.101757Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:33.101790Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:33.101823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:33.101857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:33.101903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:33.101931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:33.101950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:33.102170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:33.102256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:33.102281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:33.102383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:33.102425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:33.102460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:33.102526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:33.102576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:33.102603Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:33.102648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:33.102673Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:33.102694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:33.102786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:33.102828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:38.369326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:38.369702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:38.369945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.370142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.370347Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.370631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:38.370853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.371067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.371544Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-10T12:29:38.372198Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.036},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.039}],"full":{"a":5780514,"name":"_full_task","f":5780514,"d_finished":0,"c":0,"l":5820214,"d":39700},"events":[{"name":"bootstrap","f":5780845,"d_finished":2241,"c":1,"l":5783086,"d":2241},{"a":5819190,"name":"ack","f":5817122,"d_finished":1846,"c":1,"l":5818968,"d":2870},{"a":5819164,"name":"processing","f":5783304,"d_finished":5470,"c":3,"l":5818972,"d":6520},{"name":"ProduceResults","f":5782365,"d_finished":3366,"c":6,"l":5819690,"d":3366},{"a":5819697,"name":"Finish","f":5819697,"d_finished":0,"c":0,"l":5820214,"d":517},{"name":"task_result","f":5783330,"d_finished":3549,"c":2,"l":5816870,"d":3549}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.372292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:38.372857Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.036},{"events":["l_ProduceResults","f_Finish"],"t":0.039},{"events":["l_ack","l_processing","l_Finish"],"t":0.04}],"full":{"a":5780514,"name":"_full_task","f":5780514,"d_finished":0,"c":0,"l":5820934,"d":40420},"events":[{"name":"bootstrap","f":5780845,"d_finished":2241,"c":1,"l":5783086,"d":2241},{"a":5819190,"name":"ack","f":5817122,"d_finished":1846,"c":1,"l":5818968,"d":3590},{"a":5819164,"name":"processing","f":5783304,"d_finished":5470,"c":3,"l":5818972,"d":7240},{"name":"ProduceResults","f":5782365,"d_finished":3366,"c":6,"l":5819690,"d":3366},{"a":5819697,"name":"Finish","f":5819697,"d_finished":0,"c":0,"l":5820934,"d":1237},{"name":"task_result","f":5783330,"d_finished":3549,"c":2,"l":5816870,"d":3549}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.372951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:38.327372Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-10T12:29:38.373004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:38.373297Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test 
command err: 2026-01-10T12:29:37.866860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:37.901699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:37.901985Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:37.909825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:37.910095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:37.910355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:37.910499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:37.910613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:37.910741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:37.910849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:37.910976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:37.911099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:37.911247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.911370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:37.911492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:37.911614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:37.942598Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:37.942981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:37.943042Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:37.943226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:37.943395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:37.943469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:37.943524Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:37.943666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:37.943740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:37.943790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:37.943823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:37.943998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:37.944062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:37.944139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:37.944182Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:37.944301Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:37.944365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:37.944439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:37.944471Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:37.944528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:37.944569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:37.944600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:37.944653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:37.944706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:37.944736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:37.945155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:37.945284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:37.945320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:37.945466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:37.945524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.945561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.945620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:37.945695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:37.945732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:37.945782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:37.945823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:37.945855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:37.946023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:37.946104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mes=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=100; 2026-01-10T12:29:38.763778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=level,timestamp; 2026-01-10T12:29:38.764022Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2026-01-10T12:29:38.764191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.764332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.764490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.764674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:38.764854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.765055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.765391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:337:2349] finished for tablet 9437184 2026-01-10T12:29:38.765841Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:329:2341];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1400696,"name":"_full_task","f":1400696,"d_finished":0,"c":0,"l":1414719,"d":14023},"events":[{"name":"bootstrap","f":1400927,"d_finished":1958,"c":1,"l":1402885,"d":1958},{"a":1413871,"name":"ack","f":1412543,"d_finished":1235,"c":1,"l":1413778,"d":2083},{"a":1413859,"name":"processing","f":1403162,"d_finished":4185,"c":3,"l":1413781,"d":5045},{"name":"ProduceResults","f":1402319,"d_finished":2198,"c":6,"l":1414357,"d":2198},{"a":1414364,"name":"Finish","f":1414364,"d_finished":0,"c":0,"l":1414719,"d":355},{"name":"task_result","f":1403176,"d_finished":2898,"c":2,"l":1412452,"d":2898}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.765951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:38.766295Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:329:2341];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1400696,"name":"_full_task","f":1400696,"d_finished":0,"c":0,"l":1415255,"d":14559},"events":[{"name":"bootstrap","f":1400927,"d_finished":1958,"c":1,"l":1402885,"d":1958},{"a":1413871,"name":"ack","f":1412543,"d_finished":1235,"c":1,"l":1413778,"d":2619},{"a":1413859,"name":"processing","f":1403162,"d_finished":4185,"c":3,"l":1413781,"d":5581},{"name":"ProduceResults","f":1402319,"d_finished":2198,"c":6,"l":1414357,"d":2198},{"a":1414364,"name":"Finish","f":1414364,"d_finished":0,"c":0,"l":1415255,"d":891},{"name":"task_result","f":1403176,"d_finished":2898,"c":2,"l":1412452,"d":2898}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:38.766369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:38.686584Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:29:38.766403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:38.766543Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2026-01-10T12:29:38.767056Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-10T12:29:38.767372Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1768048178840:100} readable: {1768048178840:max} at tablet 9437184 2026-01-10T12:29:38.767468Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-10T12:29:38.767685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1768048178840:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-10T12:29:38.767779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1768048178840:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-10T12:29:38.767882Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1768048178840:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2026-01-10T12:29:35.373673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:35.396357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:35.396583Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:35.402674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:35.402886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:35.403076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:35.403153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:35.403229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:35.403302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:35.403438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:35.403582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:35.403691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:35.403789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.403886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:35.403965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:35.404036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:35.425545Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:35.426008Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:35.426101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:35.426343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:35.426528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:35.426605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:35.426654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:35.426787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:35.426871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:35.426921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:35.426957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:35.427181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:35.427261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:35.427312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:35.427358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:35.427481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:35.427555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:35.427613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:35.427657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:35.427731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:35.427781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:35.427821Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:35.427894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:35.427941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:35.427977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:35.428262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:35.428387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:35.428427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:35.428569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:35.428622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.428654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:35.428723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:35.428791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:35.428826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:35.428889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:35.428942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:35.428978Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:35.429166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:35.429240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-10T12:29:39.800137Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2026-01-10T12:29:39.785624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:39.808811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:39.809038Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:39.815772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:39.816031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:39.816285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:39.816431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:39.816527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:39.816599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:39.816684Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:39.816769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:39.816835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:39.816918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:39.817019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:39.817085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:39.817182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:39.839751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:39.840150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:39.840209Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:39.840397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:39.840534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:39.840600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:39.840645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:39.840758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:39.840812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:39.840853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:39.840884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:39.841059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:39.841131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:39.841184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:39.841228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:39.841315Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:39.841356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:39.841410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:39.841432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:39.841469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:39.841510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:39.841537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:39.841572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:39.841615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:39.841638Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:39.841838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:39.841942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:39.841968Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:39.842071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:39.842112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:39.842151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:39.842202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:39.842242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:39.842268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:39.842299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:39.842325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:39.842358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:39.842492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:39.842538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
00063232;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-10T12:29:40.431243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136797338913152;op_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137003500063232;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-10T12:29:40.431311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136797338913152;op_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1768048180760;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137003500063232;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-10T12:29:40.431636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-10T12:29:40.431794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768048180760 at tablet 9437184, mediator 0 2026-01-10T12:29:40.431854Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-10T12:29:40.432161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:29:40.432280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:29:40.432323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-10T12:29:40.432403Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-10T12:29:40.440446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1768048180760;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-10T12:29:40.440554Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:40.440662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-10T12:29:40.440725Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-10T12:29:40.440911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-10T12:29:40.456843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-10T12:29:40.480509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2026-01-10T12:29:40.483454Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-10T12:29:40.483521Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=3200;operation_id=8f09516-ee2011f0-86f6ca54-1a3148a;in_flight=1;size_in_flight=3200; 2026-01-10T12:29:40.494768Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=3768;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-10T12:29:40.496434Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=3200;event=data_write_finished;writing_id=8f09516-ee2011f0-86f6ca54-1a3148a; 2026-01-10T12:29:40.496714Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=60;count=1; 2026-01-10T12:29:40.496810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=156;data_size=132;sum=156;count=2;size_of_meta=112; 2026-01-10T12:29:40.496886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=236;data_size=212;sum=236;count=1;size_of_portion=192; 2026-01-10T12:29:40.497409Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:29:40.497533Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:29:40.509848Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-10T12:29:40.510079Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:40.523961Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1768048180766 at tablet 9437184, mediator 0 2026-01-10T12:29:40.524070Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-10T12:29:40.524436Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:88;progress_tx_id=100;lock_id=1;broken=0; 2026-01-10T12:29:40.536675Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-10T12:29:40.536794Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=100;lock_id=1;broken=0; 2026-01-10T12:29:40.536997Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:29:40.537041Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-10T12:29:40.537392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:40.537467Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:40.537536Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:40.537590Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:40.537645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:29:40.549438Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:40.549544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:40.549623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:40.549760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:40.550226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1768048180766:100} readable: {1768048180766:max} at tablet 9437184 2026-01-10T12:29:40.562451Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-10T12:29:40.564728Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=100;scan_id=0;gen=0;table=;snapshot={1768048180766:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TColumnShardTestReadWrite::WriteRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2026-01-10T12:29:38.081957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:38.113374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:38.113596Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:38.120072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:38.120282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:38.120447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2026-01-10T12:29:38.120517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:38.120579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:38.120676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:38.120750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:38.120828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:38.120901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:38.120978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.121065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:38.121143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:38.121211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:38.142240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:38.142519Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:38.142560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:38.142712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:38.142937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:38.143002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:38.143032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:38.143139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:38.143189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:38.143221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:38.143253Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:38.143392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:38.143435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:38.143462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:38.143504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:38.143568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:38.143604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:38.143635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:38.143656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:38.143686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:38.143717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:38.143741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:29:38.143771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:38.143796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:38.143822Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:38.143975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:38.144064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:38.144144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:38.144249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:38.144293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.144343Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.144393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:38.144436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:38.144463Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:38.144497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:38.144520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:38.144540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:38.144639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-10T12:29:38.144704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:43.420238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:43.420592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:43.420819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.420984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.421169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.421412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:43.421629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.421827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.422250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2026-01-10T12:29:43.422834Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":5801016,"name":"_full_task","f":5801016,"d_finished":0,"c":0,"l":5815408,"d":14392},"events":[{"name":"bootstrap","f":5801284,"d_finished":1812,"c":1,"l":5803096,"d":1812},{"a":5814471,"name":"ack","f":5812638,"d_finished":1651,"c":1,"l":5814289,"d":2588},{"a":5814457,"name":"processing","f":5803274,"d_finished":4597,"c":3,"l":5814293,"d":5548},{"name":"ProduceResults","f":5802550,"d_finished":2855,"c":6,"l":5814949,"d":2855},{"a":5814955,"name":"Finish","f":5814955,"d_finished":0,"c":0,"l":5815408,"d":453},{"name":"task_result","f":5803296,"d_finished":2877,"c":2,"l":5812418,"d":2877}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.422925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:43.423481Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":5801016,"name":"_full_task","f":5801016,"d_finished":0,"c":0,"l":5816064,"d":15048},"events":[{"name":"bootstrap","f":5801284,"d_finished":1812,"c":1,"l":5803096,"d":1812},{"a":5814471,"name":"ack","f":5812638,"d_finished":1651,"c":1,"l":5814289,"d":3244},{"a":5814457,"name":"processing","f":5803274,"d_finished":4597,"c":3,"l":5814293,"d":6204},{"name":"ProduceResults","f":5802550,"d_finished":2855,"c":6,"l":5814949,"d":2855},{"a":5814955,"name":"Finish","f":5814955,"d_finished":0,"c":0,"l":5816064,"d":1109},{"name":"task_result","f":5803296,"d_finished":2877,"c":2,"l":5812418,"d":2877}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:43.423582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:43.403920Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-10T12:29:43.423631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:43.423898Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderEmpty >> 
IncrementalRestoreScan::ChangeSenderSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2026-01-10T12:29:09.343408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.369151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.369401Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.384005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.384250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.384487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.384567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.384660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.384762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.384859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.384933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.385007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.385092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.385196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.385267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.385352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.407739Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.408166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.408237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.408482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.410463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.410516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.410657Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.410750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.410809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.410862Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.411057Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.411167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.411196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.411280Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.411329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.411359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.411379Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.411417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.411444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.411463Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.411489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.411519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.411579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.411795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.411926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.411963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.412072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.412130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.412159Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.412208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.412256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.412289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.412330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.412367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.412403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.412565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.412630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... age_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-10T12:29:43.741170Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=722; 2026-01-10T12:29:43.741215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=42231; 2026-01-10T12:29:43.741252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=42380; 2026-01-10T12:29:43.741313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-10T12:29:43.741565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=211; 2026-01-10T12:29:43.741596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43020; 2026-01-10T12:29:43.741791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=134; 2026-01-10T12:29:43.741910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-10T12:29:43.742208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=257; 2026-01-10T12:29:43.742438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=186; 2026-01-10T12:29:43.752603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10107; 2026-01-10T12:29:43.764644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11926; 2026-01-10T12:29:43.764760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-10T12:29:43.764823Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2026-01-10T12:29:43.764876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:29:43.764957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2026-01-10T12:29:43.765003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:29:43.765114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2026-01-10T12:29:43.765163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:29:43.765232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2026-01-10T12:29:43.765328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2026-01-10T12:29:43.765408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-10T12:29:43.765448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73576; 
2026-01-10T12:29:43.765646Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:43.765752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:43.765801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:43.765887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:43.765925Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:43.766062Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:43.766109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:43.766139Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:43.766171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:43.766222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047851759;tx_id=18446744073709551615;;current_snapshot_ts=1768048150782; 2026-01-10T12:29:43.766254Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:43.766291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:43.766319Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:43.766399Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:43.766603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.019000s; 2026-01-10T12:29:43.770271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:43.770612Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:43.770688Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:43.770770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:43.770822Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:43.770893Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047851759;tx_id=18446744073709551615;;current_snapshot_ts=1768048150782; 2026-01-10T12:29:43.770946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:43.771018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:43.771072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:43.771166Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:29:43.771214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:43.772038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.090000s; 2026-01-10T12:29:43.772093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged >> Backup::ProposeBackup >> TSlotIndexesPoolTest::Basic [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2026-01-10T12:29:11.102942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:11.138275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:11.138642Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:11.146322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:11.146596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:11.146838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:11.146981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:11.147113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:11.147245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:11.147367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:11.147482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:11.147584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:11.147697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:11.147850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:11.147997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 
2026-01-10T12:29:11.148156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:11.182553Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:11.182952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:11.183014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:11.183235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:11.183405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:11.183482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:11.183524Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:11.183646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:11.183721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:11.183765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:11.183821Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:11.184007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:11.184116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:11.184165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:11.184205Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:11.184307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:11.184362Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:11.184413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:11.184443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:11.184489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:11.184524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:11.184557Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:11.184601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:11.184643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:11.184671Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:11.184918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:11.185049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:11.185083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:11.185301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:11.185350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:11.185381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:11.185438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:11.185496Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:11.185537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:11.185582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:11.185623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:11.185653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:11.185816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:11.185898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-10T12:29:44.881248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=882; 2026-01-10T12:29:44.881300Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=68296; 2026-01-10T12:29:44.881345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=68418; 2026-01-10T12:29:44.881412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-10T12:29:44.881760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=301; 2026-01-10T12:29:44.881805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=69340; 2026-01-10T12:29:44.881980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=124; 2026-01-10T12:29:44.882098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-10T12:29:44.882487Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=343; 2026-01-10T12:29:44.882842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=309; 2026-01-10T12:29:44.900201Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17293; 2026-01-10T12:29:44.917809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17510; 2026-01-10T12:29:44.917916Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=29; 2026-01-10T12:29:44.917974Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:29:44.918015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:29:44.918088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2026-01-10T12:29:44.918129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:29:44.918235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-10T12:29:44.918290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-10T12:29:44.918364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2026-01-10T12:29:44.918449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2026-01-10T12:29:44.918529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2026-01-10T12:29:44.918564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=112260; 2026-01-10T12:29:44.918699Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:44.918807Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:44.918860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:44.918927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:44.918971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:44.919152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:44.919209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:44.919247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:44.919291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:44.919355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853512;tx_id=18446744073709551615;;current_snapshot_ts=1768048152577; 2026-01-10T12:29:44.919396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:44.919441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:44.919478Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:44.919572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:44.919756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.104000s; 2026-01-10T12:29:44.923857Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:44.924222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:44.924310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:44.924383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:44.924432Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:44.924499Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853512;tx_id=18446744073709551615;;current_snapshot_ts=1768048152577; 2026-01-10T12:29:44.924550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:44.924597Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:44.924637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:44.924709Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:29:44.924762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:44.925567Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.061000s; 2026-01-10T12:29:44.925614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> TColumnShardTestReadWrite::WriteRead [GOOD] >> TNodeBrokerTest::TestRandomActions >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true >> EvWrite::AbortInTransaction [GOOD] >> 
TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2026-01-10T12:29:28.274835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:28.300783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:28.301081Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:28.307871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:28.308129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:28.308316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:28.308393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:28.308467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:28.308549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:28.308671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:28.308753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:28.308843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:28.308928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.309036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:28.309142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:28.309243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:28.332975Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:28.333296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:28.333350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:28.333502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:28.333629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:28.333696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:28.333732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:28.333829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:28.333913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:28.333948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:28.333971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:28.334114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:28.334169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:28.334203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:28.334235Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:28.334338Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:28.334405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:28.334451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:28.334472Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:28.334518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:28.334554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:28.334581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:28.334618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:28.334644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:28.334663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:28.334832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:28.334957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:28.334984Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:28.335091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:28.335123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.335145Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.335188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:28.335238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:28.335277Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:28.335327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:28.335360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:28.335415Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:28.335548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:28.335612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... BUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:48.519020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.519053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:29:48.519152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-10T12:29:48.519195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=timestamp; 2026-01-10T12:29:48.519425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2026-01-10T12:29:48.519579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.519715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.519841Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.520377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 
2026-01-10T12:29:48.520529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.520656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.520882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:3596:5602] finished for tablet 9437184 2026-01-10T12:29:48.521334Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:3589:5595];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.058},{"events":["f_ack"],"t":0.06},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.062}],"full":{"a":20717697,"name":"_full_task","f":20717697,"d_finished":0,"c":0,"l":20779956,"d":62259},"events":[{"name":"bootstrap","f":20718201,"d_finished":1545,"c":1,"l":20719746,"d":1545},{"a":20779370,"name":"ack","f":20777863,"d_finished":1024,"c":1,"l":20778887,"d":1610},{"a":20779351,"name":"processing","f":20720786,"d_finished":12898,"c":14,"l":20778890,"d":13503},{"name":"ProduceResults","f":20719295,"d_finished":4045,"c":17,"l":20779700,"d":4045},{"a":20779705,"name":"Finish","f":20779705,"d_finished":0,"c":0,"l":20779956,"d":251},{"name":"task_result","f":20720801,"d_finished":11630,"c":13,"l":20776589,"d":11630}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.521392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:48.521787Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:3589:5595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.058},{"events":["f_ack"],"t":0.06},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.062}],"full":{"a":20717697,"name":"_full_task","f":20717697,"d_finished":0,"c":0,"l":20780439,"d":62742},"events":[{"name":"bootstrap","f":20718201,"d_finished":1545,"c":1,"l":20719746,"d":1545},{"a":20779370,"name":"ack","f":20777863,"d_finished":1024,"c":1,"l":20778887,"d":2093},{"a":20779351,"name":"processing","f":20720786,"d_finished":12898,"c":14,"l":20778890,"d":13986},{"name":"ProduceResults","f":20719295,"d_finished":4045,"c":17,"l":20779700,"d":4045},{"a":20779705,"name":"Finish","f":20779705,"d_finished":0,"c":0,"l":20780439,"d":734},{"name":"task_result","f":20720801,"d_finished":11630,"c":13,"l":20776589,"d":11630}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:48.521862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:48.454990Z;index_granules=0;index_portions=12;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=16464;inserted_portions_bytes=14016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=30480;selected_rows=0; 2026-01-10T12:29:48.521921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:48.522059Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=d9f8388-ee2011f0-80c72801-1f4be0ad; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> IncrementalRestoreScan::Empty [GOOD] >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:29:50.686521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:50.822807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:50.833196Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:50.833750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:50.833874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:51.228510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:51.228662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:51.233630Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048187451161 != 1768048187451164 2026-01-10T12:29:51.243271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:51.287048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:51.371381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:51.663579Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Exhausted 2026-01-10T12:29:51.663744Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2026-01-10T12:29:51.663837Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Finish Done ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] Test command err: 2026-01-10T12:29:50.171950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:50.172049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2026-01-10T12:29:48.723927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:48.724889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2026-01-10T12:29:46.694442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:46.725813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:46.726013Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:46.731930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:46.732161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:46.732334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:46.732409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:46.732479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:46.732557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:46.732651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:46.732723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:46.732790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:46.732874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.732964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:46.733070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:46.733164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:46.758492Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:46.758820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:46.758866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:46.759016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:46.759183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:46.759242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:46.759273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:46.759361Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:46.759434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:46.759479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:46.759515Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:46.759683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:46.759740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:46.759773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:29:46.759805Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:46.759891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:46.759934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:46.759999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:46.760030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:46.760068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:46.760134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:46.760157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:46.760190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:46.760214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:46.760231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:46.760376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:46.760481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:46.760508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:46.760622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:46.760661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.760685Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.760740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:46.760787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:46.760823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:46.760859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:46.760891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:46.760915Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:46.760997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:46.761047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
urce_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:50.528838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:50.529191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-10T12:29:50.529422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.529606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.529794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.530078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:50.530296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.530482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.530901Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-10T12:29:50.531507Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":4290096,"name":"_full_task","f":4290096,"d_finished":0,"c":0,"l":4307497,"d":17401},"events":[{"name":"bootstrap","f":4290433,"d_finished":1900,"c":1,"l":4292333,"d":1900},{"a":4306569,"name":"ack","f":4304580,"d_finished":1768,"c":1,"l":4306348,"d":2696},{"a":4306551,"name":"processing","f":4292536,"d_finished":5054,"c":3,"l":4306352,"d":6000},{"name":"ProduceResults","f":4291773,"d_finished":3010,"c":6,"l":4307037,"d":3010},{"a":4307044,"name":"Finish","f":4307044,"d_finished":0,"c":0,"l":4307497,"d":453},{"name":"task_result","f":4292557,"d_finished":3210,"c":2,"l":4304309,"d":3210}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.531608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:50.532190Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":4290096,"name":"_full_task","f":4290096,"d_finished":0,"c":0,"l":4308180,"d":18084},"events":[{"name":"bootstrap","f":4290433,"d_finished":1900,"c":1,"l":4292333,"d":1900},{"a":4306569,"name":"ack","f":4304580,"d_finished":1768,"c":1,"l":4306348,"d":3379},{"a":4306551,"name":"processing","f":4292536,"d_finished":5054,"c":3,"l":4306352,"d":6683},{"name":"ProduceResults","f":4291773,"d_finished":3010,"c":6,"l":4307037,"d":3010},{"a":4307044,"name":"Finish","f":4307044,"d_finished":0,"c":0,"l":4308180,"d":1136},{"name":"task_result","f":4292557,"d_finished":3210,"c":2,"l":4304309,"d":3210}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:50.532291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:50.509341Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-10T12:29:50.532345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:50.532634Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> 
IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:29:50.681091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:50.770996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:50.777773Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:50.778142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:50.778248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:51.063376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:51.063566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:51.068306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048187451223 != 1768048187451226 2026-01-10T12:29:51.082250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:51.125482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:51.234208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:51.671169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:29:51.671425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.671650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:29:51.671729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:29:51.672928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:29:51.673045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:51.673944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.674121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-10T12:29:51.674367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.674468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:29:51.674510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:29:51.674562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:29:51.675289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.675359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:29:51.675406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:29:51.675975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.676022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.676096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.676149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.680124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:29:51.680776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:29:51.680948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:29:51.681615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.681680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-10T12:29:51.681723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.693447Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:51.792310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.792499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:29:51.792552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.792958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:29:51.793033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.793206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:29:51.793275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:29:51.794493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:29:51.794637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:29:51.794897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:29:51.794971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:558:2492], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-10T12:29:51.795657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.795726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-10T12:29:51.795827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:29:51.795862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.795907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:29:51.795945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.795990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:29:51.796038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.796116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:29:51.796177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:29:51.796253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-10T12:29:51.796294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-10T12:29:51.796353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594 ... ountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { 
MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-10T12:29:52.446848Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-10T12:29:52.446973Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1024:2865] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-10T12:29:52.447319Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1024:2865] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:29:52.447383Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1024:2865] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2026-01-10T12:29:52.448226Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1024:2865] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 
8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-10T12:29:52.449600Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:29:52.449802Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:29:52.450146Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:29:52.450312Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:29:50.292623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:29:50.414829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:29:50.437976Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:29:50.438775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:29:50.438923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:29:51.023178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:51.023364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:51.030696Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048187451168 != 1768048187451171 2026-01-10T12:29:51.052834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:51.105606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:51.221640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:29:51.680581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-10T12:29:51.680856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.681108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:29:51.681167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:29:51.681409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:29:51.681519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:51.682371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.682546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-10T12:29:51.682798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.682883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:29:51.682931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:29:51.682971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:29:51.683701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.683774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:29:51.683826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:29:51.684408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.684453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.684501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.684564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.689115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:29:51.689826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:29:51.690002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:29:51.690732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.690798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-10T12:29:51.690844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.702769Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:51.800343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:29:51.800567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:29:51.800624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.800947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:29:51.801019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-10T12:29:51.801238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:29:51.801330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:29:51.802476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:29:51.802537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:29:51.802782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-10T12:29:51.802846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:558:2492], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-10T12:29:51.803464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-10T12:29:51.803533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-10T12:29:51.803655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:29:51.803699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.803744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-10T12:29:51.803790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.803830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-10T12:29:51.803879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-10T12:29:51.803939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-10T12:29:51.803976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-10T12:29:51.804050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-10T12:29:51.804124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-10T12:29:51.804174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594 ... -01-10T12:29:52.440333Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-10T12:29:52.440586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2026-01-10T12:29:52.441052Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-10T12:29:52.441187Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1012:2859] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-10T12:29:52.443346Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1012:2859] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:29:52.443521Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1012:2859] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2026-01-10T12:29:52.444873Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1012:2859] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 
2026-01-10T12:29:52.447476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:1022:2863], serverId# [1:1023:2864], sessionId# [0:0:0] 2026-01-10T12:29:52.449818Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:29:52.450183Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-10T12:29:52.450548Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-10T12:29:52.450765Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] BodySize: 18 }] } 2026-01-10T12:29:52.450948Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] 
SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-10T12:29:52.451208Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:68:2115] Handle TEvGetProxyServicesRequest 2026-01-10T12:29:52.451337Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-10T12:29:52.451770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1029:2869], serverId# [1:1030:2870], sessionId# [0:0:0] 2026-01-10T12:29:52.494684Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2026-01-10T12:29:52.494851Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:29:52.495101Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-10T12:29:52.495198Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-10T12:29:52.495365Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2026-01-10T12:29:48.847322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:48.870423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:48.870631Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:48.876932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:48.877112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:48.877348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2026-01-10T12:29:48.877455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:48.877533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:48.877602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:48.877681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:48.877763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:48.877874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:48.877977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:48.878049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:48.878112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:48.878177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:48.899870Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:48.900020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:48.900093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:48.900315Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:48.900455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:48.900520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:48.900555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:48.900654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:48.900715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:48.900758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:48.900781Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:48.900914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:48.900966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:48.900997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:48.901018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:48.901140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:48.901193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:48.901242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:48.901270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:48.901313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:48.901348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:48.901373Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:29:48.901409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:48.901444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:48.901477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:48.901722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:48.901786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:48.901820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:48.901940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:48.901972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:48.901992Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:48.902061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:48.902097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:48.902130Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:48.902163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:48.902245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:48.902271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:48.902378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-10T12:29:48.902420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... T12:29:51.196613Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2026-01-10T12:29:51.196682Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1768048191494 last planned step 1768048191494 at tablet 9437184 2026-01-10T12:29:51.209331Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-10T12:29:51.209412Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-10T12:29:51.209495Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2026-01-10T12:29:51.209963Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1768048191494:max} readable: {1768048191494:max} at tablet 9437184 2026-01-10T12:29:51.210153Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-10T12:29:51.210415Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:111:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768048191494:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-10T12:29:51.210527Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:111:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768048191494:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-10T12:29:51.211499Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:111:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768048191494:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2026-01-10T12:29:51.213741Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:111:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768048191494:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-10T12:29:51.230826Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:111:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1768048191494:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[2:182:2194];trace_detailed=; 2026-01-10T12:29:51.231858Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2;column_names=field,key;);; 2026-01-10T12:29:51.232146Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2026-01-10T12:29:51.232529Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.232722Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.232963Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:51.233178Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.233389Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.233632Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:182:2194] finished for tablet 9437184 2026-01-10T12:29:51.234189Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:181:2193];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3038536,"name":"_full_task","f":3038536,"d_finished":0,"c":0,"l":3041540,"d":3004},"events":[{"name":"bootstrap","f":3038877,"d_finished":1740,"c":1,"l":3040617,"d":1740},{"a":3040766,"name":"ack","f":3040766,"d_finished":0,"c":0,"l":3041540,"d":774},{"a":3040737,"name":"processing","f":3040737,"d_finished":0,"c":0,"l":3041540,"d":803},{"name":"ProduceResults","f":3040196,"d_finished":841,"c":2,"l":3041256,"d":841},{"a":3041263,"name":"Finish","f":3041263,"d_finished":0,"c":0,"l":3041540,"d":277}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.234293Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:181:2193];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:51.234798Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:181:2193];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3038536,"name":"_full_task","f":3038536,"d_finished":0,"c":0,"l":3042200,"d":3664},"events":[{"name":"bootstrap","f":3038877,"d_finished":1740,"c":1,"l":3040617,"d":1740},{"a":3040766,"name":"ack","f":3040766,"d_finished":0,"c":0,"l":3042200,"d":1434},{"a":3040737,"name":"processing","f":3040737,"d_finished":0,"c":0,"l":3042200,"d":1463},{"name":"ProduceResults","f":3040196,"d_finished":841,"c":2,"l":3041256,"d":841},{"a":3041263,"name":"Finish","f":3041263,"d_finished":0,"c":0,"l":3042200,"d":937}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.235000Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:51.213698Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-10T12:29:51.235060Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:51.235223Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2026-01-10T12:29:47.286966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:47.310949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:47.311179Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:47.317945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:47.318180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:47.318414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:47.318495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:47.318574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:47.318686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:47.318800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:47.318900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:47.318991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:47.319087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:47.319210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:47.319289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:47.319398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:47.346146Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:47.346533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:47.346643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:47.346812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:47.346976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:47.347042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:47.347088Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:47.347218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:47.347296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:47.347332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:47.347357Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:47.347515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:47.347570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:47.347622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:47.347661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:47.347803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:47.347851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:47.347883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:47.347906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:47.347943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:47.347978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:47.348011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:47.348048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:47.348099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:47.348128Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:47.348345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:47.348482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:47.348511Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:47.348631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:47.348673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:47.348695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:47.348757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:47.348809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:47.348837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:47.348880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:47.348909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:47.348930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:47.349034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:47.349090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-10T12:29:51.217886Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-10T12:29:51.218283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-10T12:29:51.218558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.218762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.218959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.219233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:29:51.219426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.219615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.220023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2026-01-10T12:29:51.220665Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":4388339,"name":"_full_task","f":4388339,"d_finished":0,"c":0,"l":4406285,"d":17946},"events":[{"name":"bootstrap","f":4388696,"d_finished":1945,"c":1,"l":4390641,"d":1945},{"a":4405365,"name":"ack","f":4403291,"d_finished":1863,"c":1,"l":4405154,"d":2783},{"a":4405349,"name":"processing","f":4390828,"d_finished":5255,"c":3,"l":4405158,"d":6191},{"name":"ProduceResults","f":4390044,"d_finished":3203,"c":6,"l":4405810,"d":3203},{"a":4405816,"name":"Finish","f":4405816,"d_finished":0,"c":0,"l":4406285,"d":469},{"name":"task_result","f":4390850,"d_finished":3287,"c":2,"l":4403044,"d":3287}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.220781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:29:51.221314Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":4388339,"name":"_full_task","f":4388339,"d_finished":0,"c":0,"l":4407001,"d":18662},"events":[{"name":"bootstrap","f":4388696,"d_finished":1945,"c":1,"l":4390641,"d":1945},{"a":4405365,"name":"ack","f":4403291,"d_finished":1863,"c":1,"l":4405154,"d":3499},{"a":4405349,"name":"processing","f":4390828,"d_finished":5255,"c":3,"l":4405158,"d":6907},{"name":"ProduceResults","f":4390044,"d_finished":3203,"c":6,"l":4405810,"d":3203},{"a":4405816,"name":"Finish","f":4405816,"d_finished":0,"c":0,"l":4407001,"d":1185},{"name":"task_result","f":4390850,"d_finished":3287,"c":2,"l":4403044,"d":3287}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:29:51.221403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:29:51.197775Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-10T12:29:51.221474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:29:51.221783Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.0%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::NodesMigrationExpireActive >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::Test1001NodesSubscribers >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLease >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-10T12:29:51.651322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:51.651384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:29:51.741315Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-10T12:29:51.754264Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-10T12:29:55.032144Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:55.032210Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... 
waiting for cache miss (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::NodesMigrationReuseID >> KqpBatchUpdate::SimplePartitions [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-10T12:29:55.496359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:55.496438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-10T12:29:56.229086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.229160Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-10T12:29:56.287424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.287501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-10T12:29:57.070324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:57.070380Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] >> TNodeBrokerTest::NodesMigrationReuseExpiredID >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] Test command err: 2026-01-10T12:29:55.609203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:55.609267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-10T12:29:56.461158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.461231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR 2026-01-10T12:29:57.219191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:57.219256Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 63135, MsgBus: 28016 2026-01-10T12:25:09.839973Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593707948058063486:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:09.840013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:25:10.084288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:25:10.160553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:25:10.160657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:25:10.193210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:25:10.271571Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:10.287723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:25:10.432738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:25:10.432783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:25:10.432812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:25:10.432962Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:25:10.852277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:25:11.061482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:25:11.071537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:25:11.132404Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.283318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.447182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:11.524785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.274801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965237934516:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.274971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.275708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707965237934526:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.275802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:13.699599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.733990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.768545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.802336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.838933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.878204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.927200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:13.983707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:25:14.101853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969532902699:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.101940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.102391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969532902704:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.102429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593707969532902705:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.102458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:25:14.106494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:25:14.119019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593707969532902708:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:25:14.215462Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593707969532902759:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:25:14.840091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593707948058063486:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:14.840162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28478, MsgBus: 5584 2026-01-10T12:25:24.974724Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708014925253641:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:25:24.980415Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cp ... e 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 6070, MsgBus: 11695 2026-01-10T12:29:37.500385Z node 16 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7593709098954314981:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:37.500508Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:29:37.518737Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:29:37.609052Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:29:37.610892Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7593709098954314953:2081] 1768048177497760 != 1768048177497763 2026-01-10T12:29:37.649644Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:29:37.649773Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:29:37.669396Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:29:37.709503Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:29:37.709531Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:29:37.709541Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:29:37.709652Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:29:37.801220Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:29:38.260070Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:29:38.277955Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:38.336416Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:38.500396Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:38.507792Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:29:38.583625Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:42.500919Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7593709098954314981:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:29:42.501024Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:29:42.749050Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593709120429153313:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:42.749137Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:42.749402Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593709120429153323:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:42.749457Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:42.840209Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:42.876542Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:42.911586Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:42.946224Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:42.980440Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:43.020354Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:43.066240Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:43.122939Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:43.213337Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593709124724121490:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:43.213435Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:43.213444Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593709124724121495:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:43.213699Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7593709124724121497:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:43.213774Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:29:43.217532Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:29:43.230563Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7593709124724121498:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:29:43.300367Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7593709124724121550:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:29:52.593390Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:29:52.593447Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2026-01-10T12:29:56.241500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.241563Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2026-01-10T12:29:09.343417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.377562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.377803Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.385203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.385441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.385665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.385829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.385946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.386053Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.386182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.386412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.386534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.386667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.386772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.386956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.387069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.418512Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.418855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.418938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.419155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.419321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.419422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.419473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.419575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2026-01-10T12:29:09.419653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.419707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.419738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.419915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.419988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.420035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.420109Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.420216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.420273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.420315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.420346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.420419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.420459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.420514Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.420562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.420602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.420631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.420863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.421043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.421081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.421213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.421253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.421284Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.421349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.421409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.421439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.421484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.421520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.421550Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.421745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.421835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-10T12:29:55.160793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1184; 2026-01-10T12:29:55.160868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=67931; 2026-01-10T12:29:55.160921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=68074; 2026-01-10T12:29:55.160994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-10T12:29:55.161536Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=482; 2026-01-10T12:29:55.161604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=69232; 2026-01-10T12:29:55.161806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=127; 2026-01-10T12:29:55.162006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=126; 2026-01-10T12:29:55.162494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=425; 2026-01-10T12:29:55.162919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=370; 2026-01-10T12:29:55.182208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19199; 2026-01-10T12:29:55.197588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15263; 2026-01-10T12:29:55.197695Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-10T12:29:55.197757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-10T12:29:55.197808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2026-01-10T12:29:55.197886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2026-01-10T12:29:55.197947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:29:55.198032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-10T12:29:55.198078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-10T12:29:55.198150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-01-10T12:29:55.198243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-10T12:29:55.198328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=47; 2026-01-10T12:29:55.198368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=115708; 2026-01-10T12:29:55.198509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:55.198615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:55.198668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:55.198730Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:55.198776Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:55.199024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:55.199079Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:55.199121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:29:55.199167Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:55.199232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853381;tx_id=18446744073709551615;;current_snapshot_ts=1768048150837; 2026-01-10T12:29:55.199278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:55.199320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:55.199357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:55.199447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:55.199630Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.066000s; 2026-01-10T12:29:55.202903Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:55.203295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:55.203358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:55.203440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:55.203491Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:55.203562Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853381;tx_id=18446744073709551615;;current_snapshot_ts=1768048150837; 2026-01-10T12:29:55.203620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:55.203669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:55.203712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:55.203797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:29:55.203848Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:55.204614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.070000s; 2026-01-10T12:29:55.204664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] Test command err: 2026-01-10T12:29:56.982074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.982148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::SubscribeToNodes >> TNodeBrokerTest::RegistrationPipeliningNodeName >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TNodeBrokerTest::NodesMigration999Nodes >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-10T12:29:59.442891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:59.442948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... 
waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2026-01-10T12:30:00.200502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:00.200564Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> TNodeBrokerTest::Test1000NodesSubscribers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2026-01-10T12:29:23.243627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:23.279185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:23.279454Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:23.287825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:23.288141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:23.288395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:23.288522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:23.288642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:23.288784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:23.288910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:23.289047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2026-01-10T12:29:23.289169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:23.289293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.289439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:23.289565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:23.289693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:23.323060Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:23.323466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:23.323534Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:23.323737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.323915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:23.323998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:23.324054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:23.324196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:23.324320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:23.324381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:23.324419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:23.324630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.324712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:23.324774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:23.324832Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:23.324945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:23.325016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:23.325075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:23.325111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:23.325165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:23.325210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:23.325244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:23.325295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:23.325343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:23.325377Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:23.325638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:23.325807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:23.325872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:23.326054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:23.326121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.326171Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.326240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:23.326309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:23.326345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:23.326415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:23.326464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:23.326523Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:23.326684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:23.326762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:29:57.386667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=822; 2026-01-10T12:29:57.386724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=44970; 2026-01-10T12:29:57.386773Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=45065; 2026-01-10T12:29:57.386844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:29:57.387213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=326; 2026-01-10T12:29:57.387251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=45880; 2026-01-10T12:29:57.387392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=99; 2026-01-10T12:29:57.387496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=62; 2026-01-10T12:29:57.387822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=291; 2026-01-10T12:29:57.388120Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=213; 2026-01-10T12:29:57.399715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11519; 2026-01-10T12:29:57.411603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11779; 2026-01-10T12:29:57.411715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:29:57.411759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-10T12:29:57.411790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:29:57.411859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2026-01-10T12:29:57.411893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:29:57.411979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-01-10T12:29:57.412013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:29:57.412101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2026-01-10T12:29:57.412185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2026-01-10T12:29:57.412264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-10T12:29:57.412295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=77169; 2026-01-10T12:29:57.412403Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:57.412490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:57.412540Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:57.412600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:57.412637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:57.412766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:57.412808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:57.412834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:29:57.412868Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:57.412916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047865702;tx_id=18446744073709551615;;current_snapshot_ts=1768048164715; 2026-01-10T12:29:57.412946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:57.412977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.413005Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.413088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:57.413236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.177000s; 2026-01-10T12:29:57.417208Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:57.417687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:57.417770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:57.417863Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:57.417937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:57.418009Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047865702;tx_id=18446744073709551615;;current_snapshot_ts=1768048164715; 2026-01-10T12:29:57.418059Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:57.418108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.418152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.418235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:29:57.418291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:57.419326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.069000s; 2026-01-10T12:29:57.419397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2026-01-10T12:29:56.674224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:56.674307Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... 
waiting for epoch update |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2026-01-10T12:29:09.343406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.377119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.377377Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.384805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.385042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.385277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.385403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.385532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.385645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.385753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.385876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.385977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.386122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.386253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:09.386361Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.386466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.418463Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.418918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.418969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.419164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.419324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.419397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.419443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.419511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.419575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.419615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.419639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.419775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.419835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.419869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.419898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.419990Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.420038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.420071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.420130Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.420166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.420196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.420227Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.420274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.420315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.420353Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.420551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.420687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.420712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.420804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.420832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.420853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.420897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.420948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.420981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.421036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.421069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.421092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.421197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.421251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-10T12:29:57.859017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=904; 2026-01-10T12:29:57.859065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=49463; 2026-01-10T12:29:57.859102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=49554; 2026-01-10T12:29:57.859154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-10T12:29:57.859493Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=280; 2026-01-10T12:29:57.859533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=50312; 2026-01-10T12:29:57.859660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2026-01-10T12:29:57.859754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=53; 2026-01-10T12:29:57.860060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=263; 2026-01-10T12:29:57.860466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=329; 2026-01-10T12:29:57.872527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11989; 2026-01-10T12:29:57.884272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11632; 2026-01-10T12:29:57.884391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-10T12:29:57.884480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=19; 2026-01-10T12:29:57.884541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:29:57.884635Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-10T12:29:57.884684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2026-01-10T12:29:57.884774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-10T12:29:57.884815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:29:57.884877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-10T12:29:57.884958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2026-01-10T12:29:57.885020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=36; 2026-01-10T12:29:57.885049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=82279; 
2026-01-10T12:29:57.885157Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:57.885253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:57.885322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:57.885380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:57.885419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:57.885565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:57.885613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:57.885645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:57.885681Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:57.885734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853448;tx_id=18446744073709551615;;current_snapshot_ts=1768048150837; 2026-01-10T12:29:57.885768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:57.885814Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.885849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.885954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:57.886142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.142000s; 2026-01-10T12:29:57.888907Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:57.889258Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:57.889322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:57.889388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:57.889445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:57.889507Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047853448;tx_id=18446744073709551615;;current_snapshot_ts=1768048150837; 2026-01-10T12:29:57.889549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:57.889595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.889629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:57.889695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:29:57.889736Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:57.890325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.102000s; 2026-01-10T12:29:57.890375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false [GOOD] >> GracefulShutdown::TTxGracefulShutdown >> 
TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] Test command err: 2026-01-10T12:29:58.623768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:58.623854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2026-01-10T12:30:00.961147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:00.961222Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:01.176881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2026-01-10T12:30:00.627725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:00.627791Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:00.728140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:30:00.770267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2026-01-10T12:29:26.099502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:26.133327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:26.133578Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:26.141343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:26.141610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:26.141856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:26.141983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:26.142125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:26.142247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:26.142402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:26.142513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:26.142623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:26.142760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:26.142906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:26.143020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:26.143127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:26.178953Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:26.179341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:26.179397Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:26.179588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:26.179758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:26.179835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:26.179881Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:26.180009Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:26.180106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:26.180154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:26.180194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:26.180419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:26.180485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:26.180532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-10T12:29:26.180571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:26.180674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:26.180730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:26.180778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:26.180815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:26.180865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:26.180904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:26.180934Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:26.180983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:26.181022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:26.181052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:26.181301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:26.181408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:26.181458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:26.181608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:26.181656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:26.181692Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:26.181759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:26.181813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:26.181863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:26.181912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:26.181952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:26.181985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:26.182117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:26.182190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-10T12:29:59.313073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=928; 2026-01-10T12:29:59.313136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=42973; 2026-01-10T12:29:59.313188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=43084; 2026-01-10T12:29:59.313241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-10T12:29:59.313502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=230; 2026-01-10T12:29:59.313541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43764; 2026-01-10T12:29:59.313659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=73; 2026-01-10T12:29:59.313745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=51; 2026-01-10T12:29:59.314040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=261; 2026-01-10T12:29:59.314267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=197; 2026-01-10T12:29:59.324196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9875; 2026-01-10T12:29:59.333608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9319; 2026-01-10T12:29:59.333698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:29:59.333741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-10T12:29:59.333771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:29:59.333831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-01-10T12:29:59.333889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2026-01-10T12:29:59.334055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=111; 2026-01-10T12:29:59.334115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-10T12:29:59.334214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=51; 2026-01-10T12:29:59.334322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-10T12:29:59.334446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=74; 2026-01-10T12:29:59.334566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=71004; 2026-01-10T12:29:59.334718Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:29:59.334841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:29:59.334917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:29:59.335016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:29:59.335068Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:29:59.335218Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:59.335274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:29:59.335313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:29:59.335353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:59.335414Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047868550;tx_id=18446744073709551615;;current_snapshot_ts=1768048167573; 2026-01-10T12:29:59.335458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:59.335508Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:59.335551Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:59.335646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:59.335862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.186000s; 2026-01-10T12:29:59.339679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:29:59.340180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:29:59.340231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:29:59.340302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:29:59.340348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:29:59.340412Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047868550;tx_id=18446744073709551615;;current_snapshot_ts=1768048167573; 2026-01-10T12:29:59.340460Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:29:59.340501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:59.340546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:29:59.340617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:29:59.340665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:29:59.341514Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.078000s; 2026-01-10T12:29:59.341560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TNodeBrokerTest::SingleDomainModeBannedIds |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] Test command err: 2026-01-10T12:30:00.369712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:00.369805Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for updates are sent ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... waiting for updates are sent (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME ... 
unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID >> TNodeBrokerTest::SubscribeToNodes [GOOD] >> TNodeBrokerTest::Test999NodesSubscribers >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::TestListNodesEpochDeltas >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] Test command err: 2026-01-10T12:30:00.681480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:00.681567Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:02.722428Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2026-01-10T12:29:59.466219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:59.466300Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:29:59.552426Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-10T12:29:59.565200Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-10T12:30:02.866390Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:02.866483Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TNodeBrokerTest::UpdateNodesLog >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-10T12:30:04.260334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:04.260397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:04.850588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:04.850638Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2026-01-10T12:30:03.941774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:03.941862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:04.160371Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2026-01-10T12:30:04.187569Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-10T12:30:04.201735Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2026-01-10T12:30:05.024197Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-10T12:30:05.053642Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2026-01-10T12:29:17.730617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:17.761825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:17.762088Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:17.769438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:17.769615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:17.769793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:17.769908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:17.770004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:17.770065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:17.770148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:17.770249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:17.770370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:17.770544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.770697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:17.770824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:17.770962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:17.799152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:17.799507Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:17.799559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:17.799755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:17.799930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:17.799998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:17.800091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:17.800218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:17.800287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:17.800321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:17.800342Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:17.800467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:17.800508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:17.800533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:17.800557Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:17.800622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:17.800657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:17.800687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:17.800704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:17.800733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:17.800765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:17.800793Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:29:17.800826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:17.800865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:17.800885Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:17.801092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:17.801210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:17.801248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:17.801376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:17.801435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.801473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:17.801536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:17.801590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:17.801633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:17.801690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:17.801736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:17.801769Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:17.801954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-10T12:29:17.802022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-10T12:30:01.955825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=989; 2026-01-10T12:30:01.955879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=58862; 2026-01-10T12:30:01.955931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=58982; 2026-01-10T12:30:01.956005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-10T12:30:01.956381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=325; 2026-01-10T12:30:01.956428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=59891; 2026-01-10T12:30:01.956592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2026-01-10T12:30:01.956712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-10T12:30:01.957093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=333; 2026-01-10T12:30:01.957423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=280; 2026-01-10T12:30:01.973685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16188; 2026-01-10T12:30:01.988337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14529; 2026-01-10T12:30:01.988455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-10T12:30:01.988512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-10T12:30:01.988554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:30:01.988630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2026-01-10T12:30:01.988676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:30:01.988768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-10T12:30:01.988811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-10T12:30:01.988879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-10T12:30:01.988964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2026-01-10T12:30:01.989044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2026-01-10T12:30:01.989083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=101331; 2026-01-10T12:30:01.989226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:01.989335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:01.989389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:01.989454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:01.989499Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:01.989696Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:01.989755Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:01.989790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:01.989827Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:01.989880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047861802;tx_id=18446744073709551615;;current_snapshot_ts=1768048159259; 2026-01-10T12:30:01.989914Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:01.989960Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:01.989990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:01.990066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:01.990227Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.069000s; 2026-01-10T12:30:01.992597Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:01.992883Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:01.992925Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:01.992992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:01.993031Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:01.993081Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047861802;tx_id=18446744073709551615;;current_snapshot_ts=1768048159259; 2026-01-10T12:30:01.993115Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:01.993148Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:01.993176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:01.993231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:01.993268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:01.993797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.108000s; 2026-01-10T12:30:01.993832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] Test command err: 2026-01-10T12:30:03.542650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:03.542710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] Test command err: 2026-01-10T12:30:01.100645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:01.100709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] Test command err: 2026-01-10T12:30:02.109766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:02.109842Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:25:47.697753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:25:47.697871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.697923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:25:47.697984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:25:47.698029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:25:47.698064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:25:47.698153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:25:47.699035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:25:47.700022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:25:47.701365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:25:47.805038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:25:47.805119Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:25:47.823479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:25:47.823580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:25:47.823742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:25:47.832726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-10T12:25:47.833770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:25:47.836709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:25:47.839228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:25:47.847778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.848043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:25:47.866914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:25:47.867141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:25:47.867212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:25:47.867273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:25:47.867590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:25:48.037355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.038966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:25:48.039330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:25:48.039540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
11:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:30:04.874193Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-10T12:30:04.874811Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:30:04.874858Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:30:04.875057Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:30:04.875145Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:30:04.875227Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:30:04.875297Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:30:04.875381Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-10T12:30:04.875464Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:30:04.875544Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:30:04.875606Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:30:04.875824Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-10T12:30:04.875898Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-10T12:30:04.875955Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-10T12:30:04.875995Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-10T12:30:04.877028Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:30:04.877135Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:30:04.877188Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:30:04.877267Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:30:04.877356Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-10T12:30:04.878086Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:30:04.878152Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:30:04.878178Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:30:04.878204Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-10T12:30:04.878232Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-10T12:30:04.878301Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-10T12:30:04.882139Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:30:04.882267Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:30:04.882534Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:30:04.882590Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:30:04.883042Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:30:04.883149Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:30:04.883198Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:880:2831] TestWaitNotification: OK eventTxId 102 2026-01-10T12:30:04.883931Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:30:04.884276Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 402us result status StatusSuccess 2026-01-10T12:30:04.884789Z node 17 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:30:04.885586Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:30:04.885848Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 323us result status StatusSuccess 2026-01-10T12:30:04.886351Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 
TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:30:05.137418Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__find_subdomain_path_id.cpp:20: FindTabletSubDomainPathId for tablet 72075186233409546 >> TNodeBrokerTest::NodesMigration1000Nodes >> TNodeBrokerTest::NodesV2BackMigration |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove >> TNodeBrokerTest::RegistrationPipelining >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2026-01-10T12:30:05.458624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:05.458703Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) >> TNodeBrokerTest::NoEffectBeforeCommit |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2026-01-10T12:29:13.994438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:14.026123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:14.026341Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:14.033645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:14.033901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:14.034122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:14.034243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:14.034360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:14.034477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:14.034583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:14.034686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:14.034783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:14.034932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:14.035048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:14.035152Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:14.035248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:14.066322Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:14.066710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:14.066760Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:14.066941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:14.067104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:14.067174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:14.067214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:14.067318Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:14.067383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:14.067448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:14.067485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:14.067653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:14.067710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:14.067751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:14.067787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:14.067885Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:14.067939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:14.067979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:14.068016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:14.068061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:14.068119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:14.068157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:14.068199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:14.068255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:14.068289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:14.068520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:14.068656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:14.068692Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:14.068844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:14.068887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:14.068917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:14.068968Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:14.069020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:14.069046Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:14.069115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:14.069157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:14.069185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:14.069296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:14.069370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-10T12:30:04.048341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1148; 2026-01-10T12:30:04.048414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=67301; 2026-01-10T12:30:04.048468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=67447; 2026-01-10T12:30:04.048563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=28; 2026-01-10T12:30:04.049019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=392; 2026-01-10T12:30:04.049073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=68570; 2026-01-10T12:30:04.049274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2026-01-10T12:30:04.049422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=84; 2026-01-10T12:30:04.049959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=456; 2026-01-10T12:30:04.050440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=414; 2026-01-10T12:30:04.076423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=25883; 2026-01-10T12:30:04.102468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=25864; 2026-01-10T12:30:04.102647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=48; 2026-01-10T12:30:04.102726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-10T12:30:04.102779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-10T12:30:04.102867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2026-01-10T12:30:04.102920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:30:04.103021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=63; 2026-01-10T12:30:04.103071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-10T12:30:04.103151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2026-01-10T12:30:04.103255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2026-01-10T12:30:04.103356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=57; 2026-01-10T12:30:04.103442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=132990; 
2026-01-10T12:30:04.103636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:04.103779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:04.103856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:04.103958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:04.104029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:04.104297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:04.104379Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:04.104429Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:04.104483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:04.104564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047858134;tx_id=18446744073709551615;;current_snapshot_ts=1768048155524; 2026-01-10T12:30:04.104617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:04.104668Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:04.104712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:04.104825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:04.105060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.165000s; 2026-01-10T12:30:04.108743Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:04.109484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:04.109579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:04.109694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:04.109760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:04.109840Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047858134;tx_id=18446744073709551615;;current_snapshot_ts=1768048155524; 2026-01-10T12:30:04.109893Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:04.109972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:04.110019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:04.110110Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:04.110169Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:04.110792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.184000s; 2026-01-10T12:30:04.110873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] Test 
command err: 2026-01-10T12:30:04.405303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:04.405384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:06.463891Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2026-01-10T12:30:05.804159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:05.804239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2026-01-10T12:30:05.417631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:05.417715Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::NodesMigrationNewExpiredNode >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::SyncNodes >> TNodeBrokerTest::RegistrationPipelining [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2026-01-10T12:30:07.132226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:07.132300Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] Test command err: 2026-01-10T12:30:05.833736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:05.833819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2026-01-10T12:30:08.197088Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1024] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration |96.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2026-01-10T12:30:08.458004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.458093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] Test command err: 2026-01-10T12:30:08.278893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.278970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2026-01-10T12:30:08.663705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.663771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease >> TNodeBrokerTest::NodesMigrationRemoveActive >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] >> TNodeBrokerTest::NodesV2BackMigration [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TNodeBrokerTest::ConfigPipelining >> TNodeBrokerTest::ResolveScopeIdForServerless >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] >> TNodeBrokerTest::SyncNodes [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> TNodeBrokerTest::NodesAlreadyMigrated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] Test command err: 2026-01-10T12:30:08.659167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.659269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:09.792233Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration [GOOD] Test command err: 2026-01-10T12:30:08.109457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.109539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] Test command err: 2026-01-10T12:30:10.057291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:10.057359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2026-01-10T12:30:10.430772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:10.430830Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::ShiftIdRangeRemoveActive |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2026-01-10T12:30:10.408228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:10.408299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:10.493932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:30:10.521182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] Test command err: 2026-01-10T12:30:08.105436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:08.105521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2026-01-10T12:30:12.377136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:12.377203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:12.517696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-10T12:30:12.681603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2026-01-10T12:29:23.800175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:23.828109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:23.828361Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:23.835529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:23.835777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:23.835989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:23.836135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:23.836269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:23.836379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:23.836482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:23.836600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:23.836692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:23.836851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.836955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:23.837068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:23.837179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:23.867252Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:23.867670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:23.867728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:23.867908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.868095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:23.868172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:23.868214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:23.868327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:23.868493Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:23.868540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:23.868573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:23.868749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.868821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:23.868866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:23.868904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:23.869020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:23.869075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:23.869114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:23.869148Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:23.869191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:23.869226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:23.869269Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:23.869323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:23.869364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:23.869413Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:23.869665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:23.869787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:23.869834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:23.869994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:23.870038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.870070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.870125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:23.870189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:23.870217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:23.870264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:23.870297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:23.870327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:23.870444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:23.870499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-10T12:30:09.821855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=639; 2026-01-10T12:30:09.821893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=45501; 2026-01-10T12:30:09.821925Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=45601; 2026-01-10T12:30:09.821983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-10T12:30:09.822206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=193; 2026-01-10T12:30:09.822234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=46383; 2026-01-10T12:30:09.822340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=70; 2026-01-10T12:30:09.822428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=50; 2026-01-10T12:30:09.822663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=196; 2026-01-10T12:30:09.822847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=154; 2026-01-10T12:30:09.831821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8926; 2026-01-10T12:30:09.841853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9936; 2026-01-10T12:30:09.841927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-10T12:30:09.841964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-10T12:30:09.842010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:30:09.842065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2026-01-10T12:30:09.842113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:30:09.842190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-10T12:30:09.842234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-10T12:30:09.842287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2026-01-10T12:30:09.842347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=36; 2026-01-10T12:30:09.842402Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=32; 2026-01-10T12:30:09.842427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=75365; 2026-01-10T12:30:09.842524Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:09.842608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:09.842647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:09.842692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:09.842722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:09.842849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:09.842905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:09.842939Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:30:09.842977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:09.843038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047867936;tx_id=18446744073709551615;;current_snapshot_ts=1768048165336; 2026-01-10T12:30:09.843076Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:09.843107Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:09.843130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:09.843193Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:09.843323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.087000s; 2026-01-10T12:30:09.845294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:09.845457Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:09.845491Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:09.845538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:09.845572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:09.845618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047867936;tx_id=18446744073709551615;;current_snapshot_ts=1768048165336; 2026-01-10T12:30:09.845647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:09.845676Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:09.845702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:09.845750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:09.845784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:09.846225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.121000s; 2026-01-10T12:30:09.846255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TLocalTests::TestRemoveTenantWhileResolving >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted >> TNodeBrokerTest::ExtendLeasePipelining ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] Test command err: 2026-01-10T12:30:11.927505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:11.927586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2026-01-10T12:30:11.696835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:11.696914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2026-01-10T12:30:07.386536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:07.386623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:12.170203Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:12.170272Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:12.298446Z node 9 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] Test command err: 2026-01-10T12:30:11.096036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:11.096151Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> TNodeBrokerTest::NodeNameReuseRestart >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::BasicFunctionality >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2026-01-10T12:29:40.963731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:40.986335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:40.986515Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:40.993718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:40.994006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:40.994211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:40.994329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:40.994452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:40.994575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:40.994708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:40.994828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:40.994927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:40.995063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:40.995214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:40.995323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:40.995429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:41.026233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:41.026625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:41.026703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:41.026893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:41.027048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:41.027116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:41.027157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:41.027266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:41.027347Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:41.027390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:41.027419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:41.027623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:41.027701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:41.027745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:41.027785Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:41.027889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:41.027946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:41.027990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:41.028044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:41.028113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:41.028155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:41.028183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:41.028226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:41.028263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:41.028295Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:41.028537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:41.028680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:41.028726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:41.028862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:41.028903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:41.028938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:41.028999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:41.029068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:41.029104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:41.029151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:41.029187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:41.029220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:41.029363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:41.029434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:30:11.851769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=633; 2026-01-10T12:30:11.851804Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=54504; 2026-01-10T12:30:11.851835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=54583; 2026-01-10T12:30:11.851881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-10T12:30:11.852131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=216; 2026-01-10T12:30:11.852163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=55224; 2026-01-10T12:30:11.852272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=73; 2026-01-10T12:30:11.852384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2026-01-10T12:30:11.852851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=410; 2026-01-10T12:30:11.853136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=231; 2026-01-10T12:30:11.861869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8679; 2026-01-10T12:30:11.872029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10073; 2026-01-10T12:30:11.872134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:30:11.872177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-10T12:30:11.872217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:30:11.872293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2026-01-10T12:30:11.872322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:30:11.872390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=39; 2026-01-10T12:30:11.872433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:30:11.872478Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2026-01-10T12:30:11.872565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2026-01-10T12:30:11.872642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=43; 2026-01-10T12:30:11.872678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=82003; 2026-01-10T12:30:11.872798Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:11.872878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:11.872915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:11.872961Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:11.872992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:11.873125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:11.873196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:11.873225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:30:11.873256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:11.873305Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047883383;tx_id=18446744073709551615;;current_snapshot_ts=1768048182448; 2026-01-10T12:30:11.873343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:11.873372Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:11.873394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:11.873468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:11.873621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.018000s; 2026-01-10T12:30:11.877304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:11.877463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:11.877502Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:11.877560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:11.877596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:11.877655Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047883383;tx_id=18446744073709551615;;current_snapshot_ts=1768048182448; 2026-01-10T12:30:11.877690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:11.877748Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:11.877795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:11.877902Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:11.877937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:11.878800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.058000s; 2026-01-10T12:30:11.878848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::NodesMigrationNodeName >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true >> TEnumerationTest::TestPublish [GOOD] >> TLocalTests::TestAddTenant >> TNodeBrokerTest::NodesMigrationNewActiveNode >> TNodeBrokerTest::NodesMigration2000Nodes >> TDynamicNameserverTest::TestCacheUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2026-01-10T12:29:36.313632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:36.335782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:36.336029Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:36.343368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:36.343633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:36.343860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:36.343974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:36.344117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:36.344274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:36.344392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:36.344512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:36.344617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:36.344750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.344882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:36.344996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:36.345096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:36.366002Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:36.366338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:36.366384Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:36.366539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.366678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:36.366732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:36.366768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:36.366848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:36.366901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:36.366931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:36.366963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:36.367109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.367163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:36.367202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:36.367228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:36.367309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:36.367351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:36.367384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:36.367403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:36.367437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:36.367464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:36.367488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-10T12:29:36.367536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:36.367567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:36.367585Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:36.367754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:36.367846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:36.367871Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:36.367963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:36.368000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.368018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.368063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:36.368127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:36.368151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:36.368181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:36.368209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:36.368229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:36.368350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-10T12:29:36.368424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-10T12:30:12.766974Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=853; 2026-01-10T12:30:12.767024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=46639; 2026-01-10T12:30:12.767068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=46731; 2026-01-10T12:30:12.767127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:30:12.767527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=349; 2026-01-10T12:30:12.767572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=47573; 2026-01-10T12:30:12.767728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=104; 2026-01-10T12:30:12.767890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=109; 2026-01-10T12:30:12.768356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=402; 2026-01-10T12:30:12.768689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=282; 2026-01-10T12:30:12.784138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15376; 2026-01-10T12:30:12.798279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14026; 2026-01-10T12:30:12.798390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-01-10T12:30:12.798448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:30:12.798494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:30:12.798609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=74; 2026-01-10T12:30:12.798678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:30:12.798776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2026-01-10T12:30:12.798822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:30:12.798885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-10T12:30:12.798974Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-10T12:30:12.799058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=52; 2026-01-10T12:30:12.799095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=84651; 2026-01-10T12:30:12.799230Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:12.799344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:12.799398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:12.799462Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:12.799531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:12.799705Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:12.799767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:12.799818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:12.799864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:12.799940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047878766;tx_id=18446744073709551615;;current_snapshot_ts=1768048177789; 2026-01-10T12:30:12.799978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:12.800019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:12.800053Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:12.800147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:12.800322Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.013000s; 2026-01-10T12:30:12.804177Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:12.804573Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:12.804614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:12.804685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:12.804727Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:12.804778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047878766;tx_id=18446744073709551615;;current_snapshot_ts=1768048177789; 2026-01-10T12:30:12.804858Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:12.804896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:12.804924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:12.804981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:12.805027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:12.805852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.144000s; 2026-01-10T12:30:12.805896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] Test command err: 2026-01-10T12:30:12.683564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:12.683638Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] >> TLocalTests::TestAddTenant [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2026-01-10T12:30:13.293709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:13.293798Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:14.339172Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-10T12:30:15.694258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:15.694326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2026-01-10T12:30:16.283211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:16.283268Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2026-01-10T12:30:13.182868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:13.182934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2026-01-10T12:30:16.597239Z node 1 :LOCAL ERROR: local.cpp:1299: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2026-01-10T12:30:16.597462Z node 1 :LOCAL ERROR: local.cpp:1549: Unknown domain dc-3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] Test command err: 2026-01-10T12:30:13.002725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:13.002816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::FixedNodeId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2026-01-10T12:30:15.289187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:15.289260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:15.378287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::NodesMigration1001Nodes >> TNodeBrokerTest::NodesMigrationExpiredChanged ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2026-01-10T12:30:14.852172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:14.852235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2026-01-10T12:29:09.343492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:09.384434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:09.384768Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:09.394069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:09.394384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:09.394666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:09.394831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:09.394959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:09.395078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:09.395257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:09.395416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:09.395582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:09.395766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.395938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-10T12:29:09.396092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:09.396252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:09.433225Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:09.433715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:09.433786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:09.434028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.434231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:09.434327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:09.434382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:09.434501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:09.434576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:09.434624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:09.434659Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:09.434873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:09.434948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:09.434994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:09.435055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:09.435135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:09.435174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:09.435219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:09.435245Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:09.435282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:09.435308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:09.435332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:09.435363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:09.435389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:09.435410Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:09.435570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:09.435650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:09.435672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:09.435756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:09.435782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.435801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:09.435849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:09.435889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:09.435916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:09.435956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:09.435980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:09.436006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:09.436135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:09.436187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
umn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5593; 2026-01-10T12:30:14.728784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:30:14.729661Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=820; 2026-01-10T12:30:14.729729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6713; 2026-01-10T12:30:14.729785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6835; 2026-01-10T12:30:14.729850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:30:14.729946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2026-01-10T12:30:14.730006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7380; 2026-01-10T12:30:14.730149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2026-01-10T12:30:14.730241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2026-01-10T12:30:14.730374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=100; 2026-01-10T12:30:14.730490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=84; 2026-01-10T12:30:14.731953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1428; 2026-01-10T12:30:14.733443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1436; 2026-01-10T12:30:14.733492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-10T12:30:14.733593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=69; 2026-01-10T12:30:14.733630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:30:14.733700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2026-01-10T12:30:14.733735Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:30:14.733795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=38; 2026-01-10T12:30:14.733823Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:30:14.733871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2026-01-10T12:30:14.733956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2026-01-10T12:30:14.734042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=53; 2026-01-10T12:30:14.734069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19345; 2026-01-10T12:30:14.734194Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:14.734283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:14.734345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:14.734402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:14.734459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:14.734561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:14.734609Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:14.734640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:14.734684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:14.734734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:14.734775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:14.734805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:14.734880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:14.735039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.108000s; 2026-01-10T12:30:14.737501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:14.737674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:14.737742Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:14.737824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:14.737871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:14.737921Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:14.738025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:14.738086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:14.738138Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:14.738220Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:14.738263Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:14.738805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.165000s; 2026-01-10T12:30:14.738845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] Test command err: 2026-01-10T12:30:16.110597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:16.110655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2026-01-10T12:30:15.334263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:15.334358Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-10T12:30:16.561356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:16.561415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:17.752860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:17.752932Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TNodeBrokerTest::LoadStateMoveEpoch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] Test command err: 2026-01-10T12:30:16.762521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:16.762602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted >> TNodeBrokerTest::NodeNameExpiration >> TNodeBrokerTest::ExtendLeaseBumpVersion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2026-01-10T12:30:15.135447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:15.135517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:17.034542Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1142: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TSlotIndexesPoolTest::Ranges [GOOD] >> TLocalTests::TestAlterTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2026-01-10T12:30:17.934558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:17.934625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2026-01-10T12:30:15.401358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:15.401441Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:16.984748Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001, expected = DC=1/M=2/R=3/U=4/, got = DC=1/M=2/R=3/U=5/ 2026-01-10T12:30:16.999090Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-10T12:30:16.999630Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-10T12:30:17.000104Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges >> TNodeBrokerTest::ShiftIdRangeRemoveExpired |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2026-01-10T12:30:19.345760Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:30:19.346308Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:30:19.346950Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7664432131272892668 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:30:19.353865Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:30:19.354417Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:30:19.354641Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/0024c5/r3tmp/tmpsBJnJs/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1262917263886439911 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving >> TLocalTests::TestAddTenantWhileResolving [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] >> TNodeBrokerTest::ExtendLeaseBumpVersion [GOOD] >> TNodeBrokerTest::EpochCacheUpdate >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] Test command err: 2026-01-10T12:30:18.896799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:18.896879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2026-01-10T12:30:19.427549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:19.427618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] Test command err: 2026-01-10T12:30:16.679304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:16.679380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] >> Secret::SimpleQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2026-01-10T12:29:46.537245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:46.559719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:46.559937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:46.566273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:46.566477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:46.566663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:46.566734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:46.566823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:46.566910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:46.567002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:46.567074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:46.567140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:46.567250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.567322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:46.567391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:46.567452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:46.589680Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:46.590018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:46.590086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:46.590247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:46.590403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:46.590478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:46.590527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:46.590646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:46.590724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:46.590762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:46.590805Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:46.590960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:46.591009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:46.591043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:46.591070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:46.591142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:46.591196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:46.591226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:46.591250Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:46.591283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:46.591313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:46.591347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:46.591381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:46.591419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:46.591452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:46.591657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:46.591835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:46.591864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:46.592086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:46.592122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.592144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:46.592187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:46.592252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:46.592279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:46.592312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:46.592339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:46.592398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:46.592500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:46.592545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-10T12:30:19.334731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=977; 2026-01-10T12:30:19.334790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=76334; 2026-01-10T12:30:19.334836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=76452; 2026-01-10T12:30:19.334897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:30:19.335321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=374; 2026-01-10T12:30:19.335368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=77417; 2026-01-10T12:30:19.335521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=101; 2026-01-10T12:30:19.335645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2026-01-10T12:30:19.336132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=441; 2026-01-10T12:30:19.336593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=408; 2026-01-10T12:30:19.355382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18703; 2026-01-10T12:30:19.370615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15094; 2026-01-10T12:30:19.370744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-10T12:30:19.370806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-10T12:30:19.370849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:30:19.370931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2026-01-10T12:30:19.370978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:30:19.371068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2026-01-10T12:30:19.371114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-10T12:30:19.371184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-01-10T12:30:19.371277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-10T12:30:19.371366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49; 2026-01-10T12:30:19.371407Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=122233; 2026-01-10T12:30:19.371543Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:19.371663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:19.371708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:19.371777Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:19.371821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:19.372016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:19.372122Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:19.372172Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:30:19.372214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:19.372292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047888944;tx_id=18446744073709551615;;current_snapshot_ts=1768048188009; 2026-01-10T12:30:19.372337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:19.372378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:19.372411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:19.372492Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:19.372676Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.179000s; 2026-01-10T12:30:19.376388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:19.376578Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:19.376618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:19.376695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:19.376731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:19.376796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047888944;tx_id=18446744073709551615;;current_snapshot_ts=1768048188009; 2026-01-10T12:30:19.376844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:19.376878Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:19.376904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:19.376988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:19.377029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:19.377798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.136000s; 2026-01-10T12:30:19.377835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-10T12:30:14.857295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:14.857357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> Secret::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2026-01-10T12:30:18.843526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:18.843592Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2026-01-10T12:30:19.641749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:19.641811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:19.730426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:05.561676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:05.720144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:05.735264Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:05.735929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:05.736059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:06.236656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:06.236836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:06.243906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048082616233 != 1768048082616236 2026-01-10T12:28:06.269666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:06.321280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:06.422845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:07.212104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:07.212745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:07.212853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:07.212903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:07.213442Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:07.227920Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-10T12:28:18.987179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2866], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:18.987332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2871], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:18.990452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:18.992267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1027:2876], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:18.992441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:18.999547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:19.021195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1025:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:28:19.086293Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1078:2908] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:19.425577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:20.492628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:20.959478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:21.718410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:22.381137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:22.758578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:24.014770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-10T12:28:24.493369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH 
value = `100`;RESULT=;EXPECTATION=1 2026-01-10T12:28:27.586841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:27.586904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:26.664932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:27.722990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:29.333970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:29:29.763391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE 
SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 E0110 12:30:23.004959133 502878 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-10T12:30:23.004771354+00:00"} |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.2%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] Test command err: 2026-01-10T12:30:20.863223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:20.863308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:22.345650Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:05.561472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:05.720168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:05.735294Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:05.735855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:05.735970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:06.249187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:06.249355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:06.257206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048082616235 != 1768048082616238 2026-01-10T12:28:06.271536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:06.321359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:06.422662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:07.214526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:07.215106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:07.215169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:07.215218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:07.215776Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:07.230474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-10T12:28:19.368132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1013:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.368317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.368857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1040:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.368924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.378291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:19.559373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1132:2947], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.559528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.559806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1137:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.559971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1138:2953], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.560204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:19.564103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:19.672703Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1141:2956], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:28:20.247859Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1233:3021] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:20.855679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:21.270351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:21.884989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:22.503675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:22.990591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:24.151234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-10T12:28:24.573638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2026-01-10T12:28:27.581049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:27.581127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT 
OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:27.054025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715740:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:28.093090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715747:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:29.893150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715758:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:29:30.394408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715761:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: 
SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-10T12:30:19.623735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:19.623815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2026-01-10T12:30:20.879704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:20.879762Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-10T12:30:20.958997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2026-01-10T12:29:15.343391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:15.372338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:15.372585Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:15.379981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:15.380278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:15.380495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:15.380639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:15.380749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:15.380844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:15.380981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:15.381117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:15.381234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:15.381352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:15.381503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:15.381610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:15.381740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:15.411902Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:15.412424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:15.412480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:15.412671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:15.412850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:15.412922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:15.412964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:15.413074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:15.413158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:15.413201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:15.413230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:15.413401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:15.413463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:15.413517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:15.413559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:15.413660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:15.413731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:15.413818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:15.413849Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:15.413914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:15.413965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:15.414017Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:15.414063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:15.414100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:15.414132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:15.414348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:15.414477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:15.414516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:15.414681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:15.414722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:15.414756Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:15.414808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:15.414866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:15.414894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:15.414939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:15.414988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:15.415030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:15.415156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:15.415229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=10751; 2026-01-10T12:30:21.494388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-10T12:30:21.495593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1107; 2026-01-10T12:30:21.495675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=12334; 2026-01-10T12:30:21.495733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=12512; 2026-01-10T12:30:21.495810Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-10T12:30:21.495940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=69; 2026-01-10T12:30:21.496006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=13411; 2026-01-10T12:30:21.496272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=189; 2026-01-10T12:30:21.496486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=143; 2026-01-10T12:30:21.496817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=260; 2026-01-10T12:30:21.497091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=205; 2026-01-10T12:30:21.502553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5373; 2026-01-10T12:30:21.508126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5423; 2026-01-10T12:30:21.508266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-10T12:30:21.508339Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2026-01-10T12:30:21.508392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-10T12:30:21.508504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=67; 2026-01-10T12:30:21.508563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:30:21.508701Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=90; 2026-01-10T12:30:21.508775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-10T12:30:21.508870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=48; 2026-01-10T12:30:21.509036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=104; 2026-01-10T12:30:21.509159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=69; 2026-01-10T12:30:21.509220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=39019; 2026-01-10T12:30:21.509425Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:21.509587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:21.509713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:21.509811Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:21.509876Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:21.510086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:21.510182Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:21.510234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:21.510293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:21.510391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:21.510455Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:21.510549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:21.510686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:21.510968Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.091000s; 2026-01-10T12:30:21.514033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:21.515121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:21.515206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:21.515324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:21.515393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:21.515459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:21.515544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:21.515613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:21.515694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:21.515809Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:21.515886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:21.516834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.170000s; 2026-01-10T12:30:21.516900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] Test command err: 2026-01-10T12:30:19.776416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:19.776474Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:30:22.540127Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:22.540183Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2026-01-10T12:29:16.795157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:16.828912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:16.829164Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:16.837166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:16.837460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:16.837701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:16.837853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:16.837987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:16.838097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:16.838227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:16.838345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:16.838464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:16.838598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:16.838730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:16.838843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:16.838973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:16.871385Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:16.871774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:16.871845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:16.872061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:16.872265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:16.872341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:16.872389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:16.872512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:16.872581Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:16.872630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:16.872667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:16.872845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:16.872922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:16.872975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:16.873015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:16.873121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:16.873215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:16.873261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:16.873300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:16.873363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:16.873409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:16.873445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:16.873489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:16.873529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:16.873564Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:16.873818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:16.873941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:16.873977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:16.874113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:16.874156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:16.874186Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:16.874243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:16.874299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:16.874331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:16.874387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:16.874439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:16.874486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:16.874678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:16.874746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7595; 2026-01-10T12:30:23.119364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:30:23.120217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=801; 2026-01-10T12:30:23.120279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8746; 2026-01-10T12:30:23.120323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8889; 2026-01-10T12:30:23.120408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-10T12:30:23.120500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=46; 2026-01-10T12:30:23.120538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9642; 2026-01-10T12:30:23.120711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2026-01-10T12:30:23.120849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=96; 2026-01-10T12:30:23.121035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=142; 2026-01-10T12:30:23.121194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=118; 2026-01-10T12:30:23.123901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2660; 2026-01-10T12:30:23.126795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2821; 2026-01-10T12:30:23.126896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:30:23.126952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-10T12:30:23.127004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:30:23.127091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-10T12:30:23.127132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:30:23.127222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2026-01-10T12:30:23.127263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:30:23.127321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-10T12:30:23.127422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=64; 2026-01-10T12:30:23.127521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=57; 2026-01-10T12:30:23.127565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27337; 2026-01-10T12:30:23.127726Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:23.127851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:23.127936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:23.128012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:23.128091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:23.128228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:23.128295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:23.128338Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:23.128389Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:23.128459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:23.128511Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:23.128552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:23.128649Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:23.128880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.122000s; 2026-01-10T12:30:23.131065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:23.132111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:23.132175Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:23.132254Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:23.132297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:23.132361Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:23.132435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:23.132490Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:23.132543Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:23.132625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:23.132687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:23.133301Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.188000s; 2026-01-10T12:30:23.133386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2026-01-10T12:30:20.901070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:20.901148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2026-01-10T12:30:24.829985Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593709303841477128:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:30:24.830079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:30:25.107465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:30:25.155826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:30:25.155895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:30:25.160400Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:26885) connection closed with error: Connection refused 2026-01-10T12:30:25.161525Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-10T12:30:25.164549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:30:25.189058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:25.254969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:30:25.254997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:30:25.255093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:30:25.255219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:30:25.305054Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:30:25.837924Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2026-01-10T12:30:24.829171Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593709301085965632:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:30:24.829267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:30:25.080327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:30:25.125981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:30:25.126109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:30:25.191935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:30:25.200191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:25.228809Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593709301085965596:2081] 1768048224825819 != 1768048224825822 2026-01-10T12:30:25.254690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002983/r3tmp/yandexpidEoO.tmp 2026-01-10T12:30:25.254716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002983/r3tmp/yandexpidEoO.tmp 2026-01-10T12:30:25.255832Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2026-01-10T12:30:25.255883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/d9xi/002983/r3tmp/yandexpidEoO.tmp 2026-01-10T12:30:25.256016Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:30:25.335573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> test_canonical_records.py::test_create_drop_and_alter_table |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2026-01-10T12:29:38.267858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:38.291751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:38.291982Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:38.298014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:38.298196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:38.298407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:38.298497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:38.298589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:38.298664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:38.298751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:38.298833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:38.298910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:38.299015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.299087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:38.299171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:38.299257Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:38.322967Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:38.323263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:38.323319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:38.323455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:38.323585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:38.323643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:38.323687Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:38.323764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:38.323811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:38.323846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:38.323876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:38.323988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:38.324038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:38.324094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:38.324124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:38.324194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:38.324230Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:38.324257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:38.324283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:38.324334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:38.324379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:38.324409Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:38.324487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:38.324527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:38.324549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:38.324765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:38.324852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:38.324879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:38.325030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:38.325058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.325078Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:38.325136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:38.325197Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:38.325216Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:38.325245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:38.325286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:38.325324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:38.325474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:38.325589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-10T12:30:24.930295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=894; 2026-01-10T12:30:24.930349Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=55070; 2026-01-10T12:30:24.930389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=55188; 2026-01-10T12:30:24.930445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:30:24.930718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=236; 2026-01-10T12:30:24.930752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=55919; 2026-01-10T12:30:24.930877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2026-01-10T12:30:24.930969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2026-01-10T12:30:24.931228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=222; 2026-01-10T12:30:24.931430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=169; 2026-01-10T12:30:24.941627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10141; 2026-01-10T12:30:24.952265Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10531; 2026-01-10T12:30:24.952362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-10T12:30:24.952406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-10T12:30:24.952435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-10T12:30:24.952511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-01-10T12:30:24.952546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:30:24.952644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=62; 2026-01-10T12:30:24.952698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-10T12:30:24.952770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2026-01-10T12:30:24.952840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2026-01-10T12:30:24.952909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=40; 2026-01-10T12:30:24.952939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=85336; 2026-01-10T12:30:24.953052Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:24.953141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:24.953190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:24.953266Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:24.953328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:24.953545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:24.953595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:24.953627Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:24.953662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:24.953715Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047882400;tx_id=18446744073709551615;;current_snapshot_ts=1768048179801; 2026-01-10T12:30:24.953756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:24.953800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:24.953831Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:24.953904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:24.954088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.018000s; 2026-01-10T12:30:24.956801Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:24.957039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:24.957081Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:24.957142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:24.957188Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:24.957241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047882400;tx_id=18446744073709551615;;current_snapshot_ts=1768048179801; 2026-01-10T12:30:24.957279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:24.957314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:24.957349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:24.957410Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:24.957456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:24.957976Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.031000s; 2026-01-10T12:30:24.958014Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> BasicStatistics::StatisticsOnShardsRestart [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2026-01-10T12:29:41.006734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:41.040692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:41.040945Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:41.048277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:41.048532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:41.048762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:41.048892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:41.049025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:41.049132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:41.049243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:41.049346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:41.049446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:41.049610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:41.049722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:41.049820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:41.049947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:41.080550Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:41.080931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:41.080987Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:41.081188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:41.081354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:41.081423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:41.081467Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:41.081582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:41.081671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:41.081717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:41.081749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:41.081968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:41.082039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:41.082084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:41.082124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:41.082224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:41.082278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:41.082329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:41.082358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:41.082404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:41.082443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:41.082490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:41.082547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:41.082585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:41.082633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:41.082859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:41.082978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:41.083013Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:41.083139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:41.083184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:41.083232Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:41.083293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:41.083345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:41.083385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:41.083442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:41.083479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:41.083510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:41.083694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:41.083758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ad_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:30:26.587485Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=852; 2026-01-10T12:30:26.587535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=42959; 2026-01-10T12:30:26.587582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=43053; 2026-01-10T12:30:26.587645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-10T12:30:26.587952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=264; 2026-01-10T12:30:26.587990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43750; 2026-01-10T12:30:26.588134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=97; 2026-01-10T12:30:26.588219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=53; 2026-01-10T12:30:26.588435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=188; 2026-01-10T12:30:26.588610Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=149; 2026-01-10T12:30:26.598584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9911; 2026-01-10T12:30:26.607244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8562; 2026-01-10T12:30:26.607325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:30:26.607363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-10T12:30:26.607391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:30:26.607440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2026-01-10T12:30:26.607467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-10T12:30:26.607526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2026-01-10T12:30:26.607552Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:30:26.607590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2026-01-10T12:30:26.607644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=33; 2026-01-10T12:30:26.607694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=30; 2026-01-10T12:30:26.607718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=70961; 2026-01-10T12:30:26.607826Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:26.607909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:26.607947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:26.607996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:26.608027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:26.608242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:26.608308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:26.608344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:30:26.608385Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:26.608447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047885081;tx_id=18446744073709551615;;current_snapshot_ts=1768048182537; 2026-01-10T12:30:26.608489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:26.608531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.608564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.608654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:26.608836Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.077000s; 2026-01-10T12:30:26.611004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:26.611199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:26.611235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:26.611299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:26.611338Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:26.611384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047885081;tx_id=18446744073709551615;;current_snapshot_ts=1768048182537; 2026-01-10T12:30:26.611413Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:26.611443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.611468Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.611528Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:26.611560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:26.611961Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.012000s; 2026-01-10T12:30:26.611990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2026-01-10T12:29:23.116199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:23.150125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:23.150396Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:23.158603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:23.158890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:23.159166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:23.159298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:23.159405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:23.159518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:23.159649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:23.159753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:23.159873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:23.160017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.160173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:23.160325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:23.160436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:23.192232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:23.192655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:23.192731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:23.192939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.193104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:23.193183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:23.193234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:23.193362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:23.193443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:23.193499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:23.193545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:23.193755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.193843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:23.193897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:23.193937Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:23.194046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:23.194106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:23.194206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:23.194244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:23.194301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:23.194346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:23.194382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:23.194430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:23.194476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:23.194507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:23.194744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:23.194868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:23.194908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:23.195076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:23.195131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.195162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.195227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:23.195291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:23.195345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:23.195400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:23.195442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:23.195479Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:23.195620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:23.195694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5309; 2026-01-10T12:30:26.491834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-10T12:30:26.492447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=561; 2026-01-10T12:30:26.492496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6225; 2026-01-10T12:30:26.492533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6374; 2026-01-10T12:30:26.492600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-10T12:30:26.492672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=37; 2026-01-10T12:30:26.492706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6960; 2026-01-10T12:30:26.492867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=97; 2026-01-10T12:30:26.492974Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2026-01-10T12:30:26.493123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=119; 2026-01-10T12:30:26.493279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=123; 2026-01-10T12:30:26.494928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1603; 2026-01-10T12:30:26.496641Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1652; 
2026-01-10T12:30:26.496710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-10T12:30:26.496745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-01-10T12:30:26.496786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:30:26.496840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2026-01-10T12:30:26.496865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=2; 2026-01-10T12:30:26.496939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-01-10T12:30:26.496966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-10T12:30:26.497026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2026-01-10T12:30:26.497094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 2026-01-10T12:30:26.497184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=65; 2026-01-10T12:30:26.497222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20132; 2026-01-10T12:30:26.497375Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:26.497471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:26.497523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:26.497585Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:26.497633Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 
2026-01-10T12:30:26.497742Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:26.497799Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:26.497831Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:26.497870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:26.497927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:26.497970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.498003Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.498116Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:26.498285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.167000s; 2026-01-10T12:30:26.499981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:26.501186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:26.501260Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:26.501347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:26.501401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:26.501465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:26.501536Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:26.501589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.501639Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:26.501723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:26.501781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:26.502520Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.112000s; 2026-01-10T12:30:26.502576Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:23:59.026179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:23:59.141623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:23:59.153991Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:23:59.154493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:23:59.154585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:23:59.611167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:23:59.736290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:23:59.736409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:23:59.778007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:23:59.910818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:00.894238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:00.895391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:00.895445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:00.895485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:00.896251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:00.956612Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:01.559055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:04.886529Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:04.894346Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:04.897868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:04.970614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:04.970746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.015441Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:05.018373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.182757Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.183612Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.186741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.187711Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.187891Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.188212Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.188404Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.188553Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.188692Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:05.393466Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:05.416210Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:05.416355Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:05.435579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:05.623830Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:05.670154Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:05.670283Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:05.708830Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:05.714115Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:05.714386Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:05.714466Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:05.714528Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:05.714586Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:05.714642Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:05.714700Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:05.717337Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:05.855803Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.855914Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2208:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:05.896685Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2240:2634] 2026-01-10T12:24:05.897045Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2240:2634], schemeshard id = 72075186224037897 2026-01-10T12:24:06.114163Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2413:2743] 2026-01-10T12:24:06.114679Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:06.117234Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Describe result: PathErrorUnknown 2026-01-10T12:24:06.117298Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Creating table 2026-01-10T12:24:06.117410Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:06.126942Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2432:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:06.131171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:06.139723Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:06.139856Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:06.157943Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:06.457102Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:06.713572Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:06.853907Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:06.854052Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Column diff is empty, finishing 2026-01-10T12:24:07.551628Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 2026-01-10T12:29:41.707907Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:29:41.707988Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:29:41.708294Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:29:41.732808Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:29:43.468148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:29:43.468316Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 42 2026-01-10T12:29:43.468440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 42 2026-01-10T12:29:47.341804Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:29:48.266492Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:29:48.266555Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:29:48.266760Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:29:48.280723Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:29:49.961559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:29:49.961791Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 43 2026-01-10T12:29:49.961920Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 43 2026-01-10T12:29:53.645524Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:29:54.575668Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), 
path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:29:54.575768Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:29:54.576127Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:29:54.590065Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:29:56.271198Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:29:56.271443Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 44 2026-01-10T12:29:56.271572Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 44 2026-01-10T12:29:59.909258Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:30:00.778485Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:30:00.778577Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:30:00.778796Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:30:00.791741Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:30:02.442090Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:30:02.442277Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 45 2026-01-10T12:30:02.442362Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 45 2026-01-10T12:30:06.419097Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-10T12:30:06.419181Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:30:06.419221Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-10T12:30:06.419279Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-10T12:30:06.587578Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:30:07.464926Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:30:07.465007Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:30:07.465203Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 
72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:30:07.478526Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:30:09.183712Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:30:09.183971Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 46 2026-01-10T12:30:09.184132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 46 2026-01-10T12:30:12.967552Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:30:13.821827Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:30:13.821911Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:30:13.822231Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:30:13.835957Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:30:15.348566Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:30:15.348800Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 47 2026-01-10T12:30:15.348922Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 47 2026-01-10T12:30:19.051923Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:30:19.960572Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:30:19.960667Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:30:19.960939Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:30:19.974353Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:30:21.644980Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:30:21.645134Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 48 2026-01-10T12:30:21.645207Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 48 2026-01-10T12:30:25.648279Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-10T12:30:26.523536Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-10T12:30:26.523614Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 
5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:30:26.523901Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-10T12:30:26.549925Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:30:28.217440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-10T12:30:28.217596Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 49 2026-01-10T12:30:28.217678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 49 2026-01-10T12:30:28.297813Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16630:9850]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:30:28.301076Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-10T12:30:28.301156Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:16630:9850], StatRequests.size() = 1 2026-01-10T12:30:28.305219Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:16646:9854]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:30:28.308482Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-10T12:30:28.308558Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:16646:9854], StatRequests.size() = 1 2026-01-10T12:30:28.312466Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:16662:9858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:30:28.315731Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-10T12:30:28.315801Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 6, ReplyToActorId = [2:16662:9858], StatRequests.size() = 1 |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2026-01-10T12:29:23.507668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:23.527306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:23.527492Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:23.532868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:23.533046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:23.533207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:23.533287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:23.533356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:23.533448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:23.533537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:23.533615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:23.533690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:23.533771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.533874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:23.533942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:23.534014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:23.553243Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:23.553537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:23.553576Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:23.553709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.553846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:23.553901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:23.553930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:23.554005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:23.554052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:23.554083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:23.554103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:23.554251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:23.554315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:23.554350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:23.554374Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:23.554443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:23.554484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:23.554532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:23.554562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:23.554606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:23.554629Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:23.554656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:23.554681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:23.554706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:23.554722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:23.554858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:23.554939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:23.554961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:23.555080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:23.555128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.555155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:23.555193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:23.555232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:23.555255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:23.555283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:23.555319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:23.555340Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:23.555420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:23.555470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=8147; 2026-01-10T12:30:29.154424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-10T12:30:29.155501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1008; 2026-01-10T12:30:29.155578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9599; 2026-01-10T12:30:29.155637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9794; 2026-01-10T12:30:29.155716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-10T12:30:29.155821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=56; 2026-01-10T12:30:29.155867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10579; 2026-01-10T12:30:29.156122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=167; 2026-01-10T12:30:29.156297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=116; 2026-01-10T12:30:29.156547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=196; 2026-01-10T12:30:29.156744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=143; 2026-01-10T12:30:29.159696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2887; 2026-01-10T12:30:29.162622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2836; 2026-01-10T12:30:29.162714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-10T12:30:29.162770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:30:29.162837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:30:29.162930Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=54; 2026-01-10T12:30:29.162976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:30:29.163099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=85; 2026-01-10T12:30:29.163153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:30:29.163245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=55; 2026-01-10T12:30:29.163372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=79; 2026-01-10T12:30:29.163470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=52; 2026-01-10T12:30:29.163520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29432; 2026-01-10T12:30:29.163719Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:29.163869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:29.163945Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:29.164034Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:29.164122Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:29.164287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:29.164367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:29.164441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:29.164497Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:29.164580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:29.164636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:29.164687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:29.164800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:29.165065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.113000s; 2026-01-10T12:30:29.167458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:29.168577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:29.168658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:29.168757Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:29.168828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:29.168892Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:29.168975Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:29.169047Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:29.169108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:29.169229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:29.169310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:29.170108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.124000s; 2026-01-10T12:30:29.170175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> 
TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> 
TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-10T12:30:33.737299Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.740793Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.741159Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:33.741246Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.741315Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:33.742060Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:258:2252], now have 1 active actors on pipe 2026-01-10T12:30:33.742204Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.764120Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.764298Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.765487Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.765715Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:33.766261Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:33.766744Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:266:2223] 2026-01-10T12:30:33.769253Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:33.769325Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:33.769490Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:266:2223] 2026-01-10T12:30:33.769566Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:33.769633Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:33.769680Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:33.769719Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:33.769765Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.769800Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:33.769842Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.769883Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:33.770001Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:33.770304Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:33.770782Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:269:2257], now have 1 active actors on pipe 2026-01-10T12:30:33.821882Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.825918Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.826327Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-10T12:30:33.826393Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.826453Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-10T12:30:33.827254Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-10T12:30:33.827379Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.829743Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.829882Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.830660Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.830781Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-10T12:30:33.831031Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:33.831194Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:407:2326] 2026-01-10T12:30:33.832523Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:33.832581Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-10T12:30:33.832623Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:407:2326] 2026-01-10T12:30:33.832671Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:33.832727Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:33.832768Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:33.832804Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:33.832843Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.832875Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:33.832923Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.832950Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-10T12:30:33.833017Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:33.833182Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:33.833550Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-10T12:30:33.852675Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.855928Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.856200Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-10T12:30:33.856251Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.856299Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-10T12:30:33.856919Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:459:2396], now have 1 active actors on pipe 2026-01-10T12:30:33.857012Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.858845Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:33.858989Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.860021Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... ][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.709098Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.709136Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:34.709235Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:34.709495Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.710067Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:531:2443], now have 1 active actors on pipe 2026-01-10T12:30:34.711359Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:538:2447], now have 1 active actors on pipe 2026-01-10T12:30:34.711422Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:537:2446], now have 1 active actors on pipe 2026-01-10T12:30:34.711895Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:34.712017Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:539:2447], now have 1 active actors on pipe 2026-01-10T12:30:34.712113Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:34.712395Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:34.723472Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:547:2454], now have 1 active actors on pipe 2026-01-10T12:30:34.750157Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.752425Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.753716Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.753783Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:34.753926Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.754308Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.754544Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:603:2457] 2026-01-10T12:30:34.755935Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:34.757005Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:34.757295Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:34.757400Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-10T12:30:34.757621Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:34.757696Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:34.757873Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:34.757933Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:34.757983Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:34.758032Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:34.758146Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:34.758351Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:34.758391Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:34.758438Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:34.758485Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:603:2457] 2026-01-10T12:30:34.758532Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.758582Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.758617Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:34.758649Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:34.758687Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.758719Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.758749Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.758790Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:34.758884Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.759045Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.759600Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:538:2447] destroyed 2026-01-10T12:30:34.759663Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:537:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } |96.4%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-10T12:30:33.933627Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.937678Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.938036Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:33.938159Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.938238Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:33.939030Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:256:2250], now have 1 active actors on pipe 2026-01-10T12:30:33.939150Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.958231Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.958417Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.959145Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: 
"rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.959339Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:33.959722Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:33.960137Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:264:2221] 2026-01-10T12:30:33.962597Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:33.962696Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:33.962786Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:264:2221] 2026-01-10T12:30:33.962851Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:33.962932Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:33.962991Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:33.963033Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:33.963086Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.963126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:33.963171Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.963211Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:33.963368Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:33.963659Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:33.964253Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:267:2255], now have 1 active actors on pipe 2026-01-10T12:30:34.017484Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.020670Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.021023Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:34.021074Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.021124Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:34.021834Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:395:2351], now have 1 active actors on pipe 2026-01-10T12:30:34.021966Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.024662Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:34.024768Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.025579Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:34.025717Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.026035Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.026299Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:403:2322] 2026-01-10T12:30:34.028138Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:34.028203Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:34.028256Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:403:2322] 2026-01-10T12:30:34.028317Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.028378Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.028427Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:34.028475Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:34.028521Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.028566Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.028608Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.028651Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:34.028791Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.029051Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.029573Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:406:2356], now have 1 active actors on pipe 2026-01-10T12:30:34.030751Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:412:2359], now have 1 active actors on pipe 2026-01-10T12:30:34.031089Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:414:2360], now have 1 active actors on pipe 2026-01-10T12:30:34.031321Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:412:2359] destroyed 2026-01-10T12:30:34.031678Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:414:2360] destroyed 2026-01-10T12:30:34.536033Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.538921Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.539204Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:34.539264Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.539336Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:34.540005Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:259:2253], now have 1 active actors on pipe 2026-01-10T12:30:34.540132Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.542196Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:34.54 ... -10T12:30:34.642438Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:34.642658Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.643202Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:471:2402], now have 1 active actors on pipe 2026-01-10T12:30:34.662607Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.666384Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.666693Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:34.666767Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.666829Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:34.667601Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:520:2438], now have 1 active actors on pipe 2026-01-10T12:30:34.667741Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.670039Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:34.670192Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.671555Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 6 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:34.671721Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.672128Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.672387Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:528:2409] 2026-01-10T12:30:34.674508Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:34.674572Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:34.674619Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:528:2409] 2026-01-10T12:30:34.674676Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.674735Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.674787Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:34.674836Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:34.674877Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.674912Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.674955Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.674989Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:34.675083Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.675286Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.675794Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:531:2443], now have 1 active actors on pipe 2026-01-10T12:30:34.676979Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:537:2446], now have 1 active actors on pipe 2026-01-10T12:30:34.677066Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:538:2447], now have 1 active actors on pipe 2026-01-10T12:30:34.677170Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:539:2447], now have 1 active actors on pipe 2026-01-10T12:30:34.688133Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:544:2451], now have 1 active actors on pipe 2026-01-10T12:30:34.717479Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.720347Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.721682Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.721756Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:34.721905Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.722407Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.722688Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:600:2454] 2026-01-10T12:30:34.725105Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:34.726755Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:34.727098Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:34.727252Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-10T12:30:34.727638Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:34.727744Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:34.728011Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:34.728066Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:34.728222Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:34.728271Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:34.728396Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:34.728652Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:34.728708Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:34.728778Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:34.728833Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:600:2454] 2026-01-10T12:30:34.728895Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.728963Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.729036Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:34.729080Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:34.729121Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.729167Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.729213Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.729257Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:34.729353Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.729613Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.730344Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:538:2447] destroyed 2026-01-10T12:30:34.730663Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:537:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |96.5%| [TM] 
{BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2026-01-10T12:30:33.653571Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.664522Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.666034Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:33.666146Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.666222Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:33.668158Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:260:2254], now have 1 active actors on pipe 2026-01-10T12:30:33.668388Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.691584Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.691785Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.699144Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.699953Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 
2026-01-10T12:30:33.700634Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:33.701472Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:268:2225] 2026-01-10T12:30:33.706195Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:33.706249Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:33.706301Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:268:2225] 2026-01-10T12:30:33.707349Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:33.707446Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:33.708120Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:33.708179Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:33.709062Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.709107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:33.709172Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.709225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:33.710432Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:33.711494Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:33.712353Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:271:2259], now have 1 active actors on pipe 2026-01-10T12:30:33.782942Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.786226Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.786533Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-10T12:30:33.786591Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.786651Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-10T12:30:33.787305Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:400:2356], now have 1 active actors on pipe 2026-01-10T12:30:33.787439Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.789482Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.789598Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.790662Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:33.790792Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:33.791078Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:33.791289Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:408:2327] 2026-01-10T12:30:33.792736Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:33.792785Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-10T12:30:33.792818Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:408:2327] 2026-01-10T12:30:33.792858Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:33.792906Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:33.792931Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:33.792961Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:33.792993Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.793015Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:33.793042Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:33.793066Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-10T12:30:33.793136Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:33.793325Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:33.793691Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:411:2361], now have 1 active actors on pipe 2026-01-10T12:30:33.809776Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:33.812831Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:33.813069Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-10T12:30:33.813107Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.813160Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-10T12:30:33.813677Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [1:460:2397], now have 1 active actors on pipe 2026-01-10T12:30:33.813766Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:33.815794Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:33.815932Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:33.816859Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:33.817004Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializ ... 
0000, To tx_18446744073709551615 2026-01-10T12:30:35.236040Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:35.236145Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.236219Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:35.237051Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:520:2438], now have 1 active actors on pipe 2026-01-10T12:30:35.237194Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.240071Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.240253Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.240944Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.241126Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.241551Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.241818Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:528:2409] 2026-01-10T12:30:35.244184Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.244261Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:35.244327Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:528:2409] 2026-01-10T12:30:35.244395Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.244478Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.244532Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:35.244577Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:35.244628Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.244670Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.244719Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.244758Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:35.244865Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.245134Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.245694Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:531:2443], now have 1 active actors on pipe 2026-01-10T12:30:35.247530Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:538:2446], now have 1 active actors on pipe 2026-01-10T12:30:35.248295Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:541:2447], now have 1 active actors on pipe 2026-01-10T12:30:35.248366Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:540:2447], now have 1 active actors on pipe 2026-01-10T12:30:35.248618Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:542:2447], now have 1 active actors on pipe 2026-01-10T12:30:35.260128Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:556:2458], now have 1 active actors on pipe 2026-01-10T12:30:35.288816Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.291598Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.292612Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.292685Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:35.292827Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.293195Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.293461Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:612:2461] 2026-01-10T12:30:35.295751Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:35.297218Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:35.297609Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:35.297759Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-10T12:30:35.298142Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:35.298232Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:35.298489Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:35.298545Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:35.298599Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:35.298646Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:35.298768Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:35.299012Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:35.299078Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.299127Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:35.299183Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:612:2461] 2026-01-10T12:30:35.299252Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.299320Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.299370Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:35.299419Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:35.299461Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.299504Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.299549Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.299591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:35.299700Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.299914Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.300912Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:538:2446] destroyed 2026-01-10T12:30:35.300991Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928137] server disconnected, pipe [3:540:2447] destroyed 2026-01-10T12:30:35.301085Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:541:2447] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-10T12:30:35.082572Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.086577Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.086994Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:35.087093Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.087169Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:35.087916Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:257:2251], now have 1 active actors on pipe 2026-01-10T12:30:35.088024Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.112855Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.113069Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.113887Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.114104Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.114552Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.115009Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:265:2222] 2026-01-10T12:30:35.117526Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.117591Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: 
[rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:35.117661Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:265:2222] 2026-01-10T12:30:35.117721Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.117786Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.117833Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:35.117869Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:35.117912Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.117947Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.117988Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.118025Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:35.118153Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.118407Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.118908Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:268:2256], now have 1 active actors on pipe 2026-01-10T12:30:35.166470Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.169926Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.170328Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-10T12:30:35.170392Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.170453Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-10T12:30:35.171250Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:398:2354], now have 1 active actors on pipe 2026-01-10T12:30:35.171389Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.173812Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:35.173939Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.174760Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:35.174921Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.175296Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.175518Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:406:2325] 2026-01-10T12:30:35.177716Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.177775Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2026-01-10T12:30:35.177818Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:406:2325] 2026-01-10T12:30:35.177869Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.177933Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.177981Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:35.178019Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928138][Partition][1][StateIdle] Process user action and tx events 2026-01-10T12:30:35.178054Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.178123Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928138][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.178169Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.178204Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-10T12:30:35.178287Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.178512Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.179009Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:409:2359], now have 1 active actors on pipe 2026-01-10T12:30:35.195752Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.199426Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.199780Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:35.199872Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.199931Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:35.200727Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:458:2395], now have 1 active actors on pipe 2026-01-10T12:30:35.200868Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.203126Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.203245Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.204026Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.204188Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-10T12:30:35.204531Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.204780Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:466:2366] 2026-01-10T12:30:35.206777Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.206835Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:35.206895Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:466:2366] 2026-01-10T12:30:35.206947Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.207004Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.207047Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:35.207081Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:35.207126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.207160Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.207198Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.207254Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:35.207343Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:35.207540Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.208010Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:469:2400], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2026-01-10T12:30:35.216350Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:476:2403], now have 1 active actors on pipe 2026-01-10T12:30:35.216921Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:479:2404], now have 1 active actors on pipe 2026-01-10T12:30:35.217165Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:480:2404], now have 1 active actors on pipe 2026-01-10T12:30:35.217922Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:476:2403] destroyed 2026-01-10T12:30:35.218553Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [2:479:2404] destroyed 2026-01-10T12:30:35.218673Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:480:2404] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-10T12:30:34.594932Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.598510Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.598816Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:34.598903Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.598962Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:34.599561Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:256:2250], now have 1 active actors on pipe 2026-01-10T12:30:34.599672Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.618598Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:34.618768Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.619595Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:34.619783Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.620129Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.620494Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:264:2221] 2026-01-10T12:30:34.622306Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step 
TInitFieldsStep 2026-01-10T12:30:34.622354Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:34.622423Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:264:2221] 2026-01-10T12:30:34.622489Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.622553Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.622602Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:34.622642Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:34.622686Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.622721Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.622761Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.622793Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:34.622907Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.623090Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.623461Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:267:2255], now have 1 active actors on pipe 2026-01-10T12:30:34.685246Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.688409Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.688745Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-10T12:30:34.688808Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.688861Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-10T12:30:34.689489Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:394:2350], now have 1 active actors on pipe 2026-01-10T12:30:34.689547Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.691689Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:34.691833Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.692479Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:34.692576Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:34.692851Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:34.693321Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:402:2321] 2026-01-10T12:30:34.695328Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:34.695373Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-10T12:30:34.695406Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:402:2321] 2026-01-10T12:30:34.695441Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:34.695484Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:34.695513Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:34.695543Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:34.695578Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.695612Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:34.695659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:34.695696Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-10T12:30:34.695798Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:34.695998Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:34.696516Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:405:2355], now have 1 active actors on pipe 2026-01-10T12:30:34.711133Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:34.715676Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:34.716042Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-10T12:30:34.716129Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.716191Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-10T12:30:34.717000Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:454:2391], now have 1 active actors on pipe 2026-01-10T12:30:34.717147Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:34.719661Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:34.719812Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:34.720758Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... 
37928138][Partition][1][StateIdle] Try persist 2026-01-10T12:30:36.037558Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.037802Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.038353Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:473:2403], now have 1 active actors on pipe 2026-01-10T12:30:36.054993Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.058891Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.059256Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:36.059337Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.059414Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.060278Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:522:2439], now have 1 active actors on pipe 2026-01-10T12:30:36.060423Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:36.062931Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.063064Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.063733Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.063884Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.064284Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.064539Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:530:2410] 2026-01-10T12:30:36.066869Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.066953Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:36.067009Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:530:2410] 2026-01-10T12:30:36.067090Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.067168Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.067225Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:36.067268Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.067312Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.067356Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.067406Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.067447Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.067561Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.067843Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.068440Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:533:2444], now have 1 active actors on pipe 2026-01-10T12:30:36.069829Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:539:2447], now have 1 active actors on pipe 2026-01-10T12:30:36.070125Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:540:2448], now have 1 active actors on pipe 2026-01-10T12:30:36.070458Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:541:2448], now have 1 active actors on pipe 2026-01-10T12:30:36.081410Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:549:2455], now have 1 active actors on pipe 2026-01-10T12:30:36.105452Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.107822Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.108810Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.108876Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.109011Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.109324Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.109572Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:605:2458] 2026-01-10T12:30:36.111749Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:36.113046Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:36.113372Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:36.113502Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-10T12:30:36.113734Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:36.113817Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:36.114073Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:36.114148Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:36.114193Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:36.114235Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:36.114369Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:36.114585Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:36.114635Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.114689Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-10T12:30:36.114738Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:605:2458] 2026-01-10T12:30:36.114797Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.114865Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.114914Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:36.114962Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.115024Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.115067Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.115113Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.115152Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.115255Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.115450Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.116160Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:540:2448] destroyed 2026-01-10T12:30:36.116249Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:539:2447] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-10T12:30:35.156803Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.160721Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.161136Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:35.161229Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.161310Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:35.162278Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:258:2252], now have 1 active actors on pipe 2026-01-10T12:30:35.162429Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.185773Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.186003Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.187021Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.187326Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.187823Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.188378Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:266:2223] 2026-01-10T12:30:35.191557Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.191635Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:35.191728Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:266:2223] 2026-01-10T12:30:35.191797Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.191883Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.191945Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:35.191991Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:35.192051Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.192124Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.192174Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.192216Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:35.192365Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.192707Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.193282Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:269:2257], now have 1 active actors on pipe 2026-01-10T12:30:35.257845Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.262520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.262960Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:35.263035Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.263125Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:35.264014Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-10T12:30:35.264206Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.267199Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.267356Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.268352Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:35.268603Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-10T12:30:35.269083Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.269351Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2326] 2026-01-10T12:30:35.271429Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.271490Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:35.271544Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2326] 2026-01-10T12:30:35.271596Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.271660Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.271707Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:35.271746Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:35.271779Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.271830Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.271870Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.271923Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:35.272011Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:35.272330Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.272877Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-10T12:30:35.274452Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:416:2363], now have 1 active actors on pipe 2026-01-10T12:30:35.274923Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-10T12:30:35.275557Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:416:2363] destroyed 2026-01-10T12:30:35.275944Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:418:2364] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-10T12:30:35.467341Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.470868Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.471218Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:35.471297Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.471356Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:35.472152Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:259:2253], now have 1 active actors on pipe 2026-01-10T12:30:35.472280Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.491361Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.491535Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.492481Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.492670Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:35.493124Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.493555Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:267:2224] 2026-01-10T12:30:35.496214Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.496276Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:35.496334Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:267:2224] 2026-01-10T12:30:35.496389Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.496452Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.496503Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:35.496538Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:35.496605Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.496650Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.496686Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.496717Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:35.496831Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.497077Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.497537Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:270:2258], now have 1 active actors on pipe 2026-01-10T12:30:35.551520Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.555753Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.556104Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-10T12:30:35.556183Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.556305Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-10T12:30:35.557085Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:400:2356], now have 1 active actors on pipe 2026-01-10T12:30:35.557209Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.559685Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.559807Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.560771Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:35.560934Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
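The initializer entries above bring a partition up through a fixed sequence of named steps (TInitConfigStep, TInitInternalFieldsStep, TInitFieldsStep) before the idle-state loop starts draining pending events, commits, and writes. Below is a minimal, self-contained sketch of that step-driven pattern; the step name strings are taken from the log, while the struct, lambdas, and main() are invented for illustration and are not the real partition_init.cpp interfaces.

// Sketch only: models the step-by-step initialization visible in the log
// (TInitConfigStep -> TInitInternalFieldsStep -> TInitFieldsStep -> completed).
// Names other than the step strings are invented for this example.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TInitStep {
    std::string Name;               // e.g. "TInitConfigStep"
    std::function<bool()> Execute;  // returns true when the step succeeds
};

int main() {
    std::vector<TInitStep> steps = {
        {"TInitConfigStep",         [] { return true; }},
        {"TInitInternalFieldsStep", [] { return true; }},
        {"TInitFieldsStep",         [] { return true; }},
    };
    for (const auto& step : steps) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Execute()) {
            std::cerr << "Initialization failed at step " << step.Name << "\n";
            return 1;
        }
    }
    std::cout << "Initializing completed.\n";
    return 0;
}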
2026-01-10T12:30:35.561286Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:35.561558Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:408:2327] 2026-01-10T12:30:35.563749Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:35.563828Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-10T12:30:35.563891Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:408:2327] 2026-01-10T12:30:35.563964Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:35.564041Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:35.564132Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-10T12:30:35.564184Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:35.564239Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.564277Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:35.564315Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:35.564352Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-10T12:30:35.564452Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:35.564705Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:35.565251Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:411:2361], now have 1 active actors on pipe 2026-01-10T12:30:35.584671Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:35.588337Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:35.588691Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-10T12:30:35.588765Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.588855Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-10T12:30:35.589725Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:460:2397], now have 1 active actors on pipe 2026-01-10T12:30:35.589798Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:35.593099Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-10T12:30:35.593235Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:35.594053Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInfligh ... 6.405642Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-10T12:30:36.405733Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.405958Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.406461Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:473:2403], now have 1 active actors on pipe 2026-01-10T12:30:36.424279Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.427727Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.428052Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:36.428135Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.428196Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.428960Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:522:2439], now have 1 active actors on pipe 2026-01-10T12:30:36.429091Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:36.431478Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.431615Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.432413Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 8 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.432540Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.432821Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.432997Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:530:2410] 2026-01-10T12:30:36.434929Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.434995Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:36.435051Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:530:2410] 2026-01-10T12:30:36.435105Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.435164Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.435207Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:36.435245Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.435285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.435320Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.435360Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.435395Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.435493Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.435761Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.436300Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:533:2444], now have 1 active actors on pipe 2026-01-10T12:30:36.437274Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:539:2447], now have 1 active actors on pipe 2026-01-10T12:30:36.437407Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:540:2448], now have 1 active actors on pipe 2026-01-10T12:30:36.437468Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:541:2448], now have 1 active actors on pipe 2026-01-10T12:30:36.448591Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:546:2452], now have 1 active actors on pipe 2026-01-10T12:30:36.472654Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.475184Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.476479Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.476547Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.476703Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.477241Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.477543Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:602:2455] 2026-01-10T12:30:36.479688Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:36.481041Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:36.481365Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:36.481497Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2026-01-10T12:30:36.481782Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:36.481866Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:36.482148Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:36.482200Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:36.482246Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:36.482282Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:36.482406Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:36.482632Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:36.482679Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.482725Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:36.482774Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:602:2455] 2026-01-10T12:30:36.482823Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.482891Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.482937Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:36.482975Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.483011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.483074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.483119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.483155Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.483244Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:36.483440Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.484149Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:540:2448] destroyed 2026-01-10T12:30:36.484222Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:539:2447] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-10T12:30:36.061090Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.064572Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.064940Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:36.065012Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.065083Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-10T12:30:36.065878Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:257:2251], now have 1 active actors on pipe 2026-01-10T12:30:36.066041Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:36.085711Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:36.085937Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.086827Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-10T12:30:36.087045Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.087520Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.087978Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:265:2222] 2026-01-10T12:30:36.090699Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.090764Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-10T12:30:36.090839Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:265:2222] 2026-01-10T12:30:36.090905Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.090973Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.091025Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-10T12:30:36.091074Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-10T12:30:36.091120Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.091157Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.091202Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.091241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-10T12:30:36.091359Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.091601Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.092163Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:268:2256], now have 1 active actors on pipe 2026-01-10T12:30:36.144882Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.148437Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.148815Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-10T12:30:36.148876Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.148939Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.149733Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:397:2353], now have 1 active actors on pipe 2026-01-10T12:30:36.149877Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-10T12:30:36.152196Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.152327Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.153172Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-10T12:30:36.153334Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
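Each "Apply new config" entry above prints the same flattened PartitionConfig text (CacheSize, MaxCountInPartition, MaxSizeInPartition, LowWatermark, and so on) together with a topic name, a config version, and the partition ids the PQ tablet should own. To make that block easier to read, here is a plain struct that restates the fields and values from the immediately preceding entry; it is an illustrative stand-in, not the actual protobuf message YDB uses.

// Sketch only: restates the PartitionConfig fields printed by the
// "Apply new config" / "Config applied" log entries. Field values are
// copied from the log; the struct itself is not the real YDB proto.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct TPartitionConfigSketch {
    uint64_t MaxCountInPartition = 20000000;
    uint64_t MaxSizeInPartition = 104857600;   // 100 MiB per partition
    uint64_t LifetimeSeconds = 0;              // no time-based retention in these tests
    uint64_t LowWatermark = 6291456;           // 6 MiB
    uint64_t SourceIdLifetimeSeconds = 3600;
    uint64_t MaxWriteInflightSize = 90000000;
};

struct TApplyConfigSketch {
    uint64_t CacheSize = 10485760;             // 10 MiB tablet cache
    std::string TopicName = "rt3.dc1--topic2";
    uint32_t Version = 2;                      // bumped on every config update
    std::vector<uint32_t> PartitionIds = {2};  // partitions owned by this tablet
    TPartitionConfigSketch PartitionConfig;
};

int main() {
    TApplyConfigSketch cfg;
    std::cout << "apply config version " << cfg.Version
              << " for topic " << cfg.TopicName << "\n";
    return 0;
}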
2026-01-10T12:30:36.153689Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.153949Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:405:2324] 2026-01-10T12:30:36.156313Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.156378Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-10T12:30:36.156428Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:405:2324] 2026-01-10T12:30:36.156485Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.156553Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.156600Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:36.156642Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.156688Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.156750Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.156804Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.156846Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.156938Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-10T12:30:36.157172Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.157675Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:408:2358], now have 1 active actors on pipe 2026-01-10T12:30:36.159148Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:414:2361], now have 1 active actors on pipe 2026-01-10T12:30:36.159526Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:36.159651Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:416:2362], now have 1 active actors on pipe 2026-01-10T12:30:36.160045Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:36.160310Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:414:2361] destroyed 2026-01-10T12:30:36.160693Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:416:2362] destroyed 2026-01-10T12:30:36.654159Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.657033Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.657314Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-10T12:30:36.657367Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.657419Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01 ... titionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:36.805614Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-10T12:30:36.816613Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:546:2453], now have 1 active actors on pipe 2026-01-10T12:30:36.843616Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-10T12:30:36.846562Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-10T12:30:36.847768Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-10T12:30:36.847856Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-10T12:30:36.847999Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-10T12:30:36.848415Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-10T12:30:36.848694Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:602:2456] 2026-01-10T12:30:36.850729Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-10T12:30:36.851861Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-10T12:30:36.852178Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-10T12:30:36.852307Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-10T12:30:36.852706Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-10T12:30:36.852800Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-10T12:30:36.853043Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-10T12:30:36.853118Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-10T12:30:36.853168Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-10T12:30:36.853212Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-10T12:30:36.853321Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-10T12:30:36.853549Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-10T12:30:36.853588Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-10T12:30:36.853621Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
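When the partition is initialized again (generation 3 in the entries above), the extra recovery steps issue key-range reads whose bounds follow a visible pattern: a one-letter prefix ('m' for metadata, 'd' for data, 'e' for the message deduplicator) followed by the zero-padded partition id, with the upper bound using the next partition id. The helper below reproduces exactly the ranges printed in these entries; the format is inferred from the log output, and the function is illustrative rather than the real key-building code.

// Sketch only: rebuilds the bounds printed by the "Read range request" entries
// (m0000000002..m0000000003, d0000000002..d0000000003,
//  e0000000002|0000000000000000..e0000000003) for partition 2.
// The layout is inferred from the log; real key construction lives in YDB.
#include <cstdint>
#include <cstdio>
#include <string>

static std::string PartitionKey(char prefix, uint32_t partitionId) {
    char buf[16];
    std::snprintf(buf, sizeof(buf), "%c%010u", prefix, partitionId);
    return buf;
}

int main() {
    const uint32_t partition = 2;
    std::printf("info range:  from %s to %s\n",
                PartitionKey('m', partition).c_str(),
                PartitionKey('m', partition + 1).c_str());
    std::printf("data range:  from %s to %s\n",
                PartitionKey('d', partition).c_str(),
                PartitionKey('d', partition + 1).c_str());
    // The deduplicator lower bound carries a zero-padded 16-digit suffix.
    std::printf("dedup range: from %s|%016u to %s\n",
                PartitionKey('e', partition).c_str(), 0u,
                PartitionKey('e', partition + 1).c_str());
    return 0;
}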
2026-01-10T12:30:36.853660Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:602:2456] 2026-01-10T12:30:36.853706Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-10T12:30:36.853765Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-10T12:30:36.853806Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-10T12:30:36.853837Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-10T12:30:36.853876Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.853910Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-10T12:30:36.853960Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-10T12:30:36.854011Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-10T12:30:36.854101Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-10T12:30:36.854279Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-10T12:30:36.855014Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:537:2446] destroyed 2026-01-10T12:30:36.855203Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:536:2445] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2026-01-10T12:29:44.352154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:44.375785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:44.376020Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:44.382555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:44.382775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:44.382986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:44.383077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:44.383179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:44.383251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:44.383326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:44.383413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:44.383504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:44.383616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:44.383709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:44.383806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:44.383917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:44.406110Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:44.406420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:44.406491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:44.406652Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:44.406788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:44.406861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:44.406896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:44.407025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:44.407079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:44.407109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:44.407134Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:44.407269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:44.407348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:44.407394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:44.407433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:44.407531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:44.407591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:44.407629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:44.407659Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:44.407694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:44.407741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:44.407768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:44.407808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:44.407856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:44.407886Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:44.408047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:44.408163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:44.408189Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:44.408315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:44.408361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:44.408381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:44.408431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:44.408492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:44.408517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:44.408565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:44.408601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:44.408632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:44.408764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:44.408812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
tage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=15; 2026-01-10T12:30:33.929054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=840; 2026-01-10T12:30:33.929099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=55840; 2026-01-10T12:30:33.929141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=55990; 2026-01-10T12:30:33.929199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-10T12:30:33.929617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=366; 2026-01-10T12:30:33.929675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=57011; 2026-01-10T12:30:33.929829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=94; 2026-01-10T12:30:33.929931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2026-01-10T12:30:33.930320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=348; 2026-01-10T12:30:33.930562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=199; 2026-01-10T12:30:33.942559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11933; 2026-01-10T12:30:33.953216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10534; 2026-01-10T12:30:33.953328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-10T12:30:33.953388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2026-01-10T12:30:33.953428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-10T12:30:33.953490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2026-01-10T12:30:33.953522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-10T12:30:33.953590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2026-01-10T12:30:33.953623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:30:33.953675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-10T12:30:33.953745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=40; 2026-01-10T12:30:33.953808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=39; 2026-01-10T12:30:33.953839Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=91681; 2026-01-10T12:30:33.953962Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:33.954057Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:33.954122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:33.954185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:33.954231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:33.954413Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:33.954475Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:33.954510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-10T12:30:33.954549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:33.954606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047888480;tx_id=18446744073709551615;;current_snapshot_ts=1768048185881; 2026-01-10T12:30:33.954642Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:33.954683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:33.954712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:33.954805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:33.955011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.194000s; 2026-01-10T12:30:33.957909Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:33.958190Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:33.958243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:33.958305Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:33.958349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-10T12:30:33.958404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1768047888480;tx_id=18446744073709551615;;current_snapshot_ts=1768048185881; 2026-01-10T12:30:33.958443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:33.958482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:33.958517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:33.958583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:30:33.958633Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:33.959214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.175000s; 2026-01-10T12:30:33.959252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |96.5%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx
>> TTxDataShardMiniKQL::Write
>> TTxDataShardMiniKQL::ReadSpecialColumns
>> TTxDataShardMiniKQL::CrossShard_1_Cycle
>> TTxDataShardMiniKQL::CrossShard_5_AllToAll
>> TTxDataShardMiniKQL::WriteKeyTooLarge
>> TTxDataShardMiniKQL::WriteEraseRead
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0]
>> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD]
>> TTxDataShardMiniKQL::SelectRange
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD]
>> TTxDataShardMiniKQL::Write [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx
>> TTxDataShardMiniKQL::TableStats
>> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD]
>> TTxDataShardMiniKQL::WriteValueTooLarge
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0]
>> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo]
>> TTxDataShardMiniKQL::WriteEraseRead [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0]
>> TTxDataShardMiniKQL::SelectRange [GOOD]
>> TTxDataShardMiniKQL::SelectRangeWithNotFullKey
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageMultiShard
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo]
>> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
>> TTxDataShardMiniKQL::TableStats [GOOD]
>> TTxDataShardMiniKQL::TableStatsHistograms
>> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD]
>> TTxDataShardMiniKQL::WriteLargeExternalBlob
>> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMany
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2026-01-10T12:30:40.776481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit,
received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.777625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.777679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.794545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.794925Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.795234Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.849372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.866645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.872705Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.877651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.877755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.877824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.878319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.878442Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.878542Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:30:40.984948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.037162Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.037396Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.037540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:30:41.037618Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.037675Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.037725Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.037985Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.038051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.038488Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.038613Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 9437184 2026-01-10T12:30:41.038677Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.038723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.038786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.038828Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.038889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.038956Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.039014Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.039137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.039190Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.039239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:30:41.051086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.051188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.051301Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.051507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.051580Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.051647Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.051703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.051756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.051822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.051864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.052220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 
2026-01-10T12:30:41.052268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.052315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.052356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.052442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.052484Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.052524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.052562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.052603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.065478Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.065553Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.065602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.065704Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.065807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.066520Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.066585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.066642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:30:41.066722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.066790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.066965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.067014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:30:41.067096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.067149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.071612Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction 
txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.071700Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.071990Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.072044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.072146Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.072202Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.072241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:41.072320Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:30:41 ... ine.cpp:1937: Add [0:7] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:30:43.436867Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:30:43.436911Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:43.436963Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 9437184 2026-01-10T12:30:43.436999Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-10T12:30:43.437024Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:30:43.437046Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:30:43.437070Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BlockFailPoint 2026-01-10T12:30:43.437095Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-10T12:30:43.437119Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:30:43.437141Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:30:43.437163Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit ExecuteDataTx 2026-01-10T12:30:43.437701Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:7] at tablet 9437184 with status COMPLETE 2026-01-10T12:30:43.437768Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:7] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 3, SelectRangeBytes: 46, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 
0, InvisibleRowSkips: 0} 2026-01-10T12:30:43.437825Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-10T12:30:43.437853Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:30:43.437879Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:43.437907Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit FinishPropose 2026-01-10T12:30:43.437947Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 7 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:30:43.438002Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is DelayComplete 2026-01-10T12:30:43.438037Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:43.438072Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit CompletedOperations 2026-01-10T12:30:43.438107Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit CompletedOperations 2026-01-10T12:30:43.438165Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-10T12:30:43.438187Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit CompletedOperations 2026-01-10T12:30:43.438210Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 9437184 has finished 2026-01-10T12:30:43.438267Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:43.438302Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 9437184 on unit FinishPropose 2026-01-10T12:30:43.438337Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:43.441994Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-10T12:30:43.442049Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-10T12:30:43.442369Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:308:2289], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:43.442407Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:43.442451Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:307:2288], serverId# [3:308:2289], sessionId# [0:0:0] 2026-01-10T12:30:43.442652Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: 
"\032\342\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\002\203\004\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?6\003?4e\005\001\013?:\003?8m\005\001\003?<\002\003?>\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? \005?\034?r\000\006\000?\036\003?x\005?&\006\ 2026-01-10T12:30:43.442694Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:43.442763Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:43.443407Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2026-01-10T12:30:43.443492Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-10T12:30:43.443529Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2026-01-10T12:30:43.443564Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:30:43.443599Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:30:43.443638Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:43.443710Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 9437184 2026-01-10T12:30:43.443746Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-10T12:30:43.443769Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:30:43.443791Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:30:43.443812Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BlockFailPoint 2026-01-10T12:30:43.443839Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-10T12:30:43.443859Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:30:43.443879Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:30:43.443903Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2026-01-10T12:30:43.444436Z node 3 :TX_DATASHARD TRACE: 
execute_data_tx_unit.cpp:311: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2026-01-10T12:30:43.444499Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:30:43.444555Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-10T12:30:43.444581Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:30:43.444606Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:43.444631Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit FinishPropose 2026-01-10T12:30:43.444665Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-10T12:30:43.444726Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is DelayComplete 2026-01-10T12:30:43.444755Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:43.444789Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit CompletedOperations 2026-01-10T12:30:43.444819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2026-01-10T12:30:43.444860Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-10T12:30:43.444882Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2026-01-10T12:30:43.444909Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 9437184 has finished 2026-01-10T12:30:43.444959Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:43.444991Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:8] at 9437184 on unit FinishPropose 2026-01-10T12:30:43.445032Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> test_auditlog.py::test_create_and_remove_tenant [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2026-01-10T12:30:40.783341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.783393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.784001Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 
268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.802592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.803007Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.803277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.852597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.872698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.873968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.876028Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.876141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.876205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.878685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.878919Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.879003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:30:40.996621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.036631Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.036830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.036935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:30:41.036977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.037039Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.037095Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.037353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.037398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.037629Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.037695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.037734Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.037758Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.037796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.037824Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.037848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.037882Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.037909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.037976Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.038013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.038041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:30:41.050652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.050726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.050811Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.050991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.051043Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.051110Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.051160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.051203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.051235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.051265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.051643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.051695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.051736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.051775Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.051839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.051902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.051946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.051983Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.052020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.065830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.065896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.065927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.065995Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.066066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.066585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.066637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.066680Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:30:41.066802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.066839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.066963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.067001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-10T12:30:41.067033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.067067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.070524Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.070585Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.070806Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.070843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.071114Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.071153Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.071184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:41.071220Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-10T12:30:41.071252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-10T12:30:41. ... 5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:30:45.646800Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:45.646837Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:30:45.646867Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompleteOperation 2026-01-10T12:30:45.646897Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2026-01-10T12:30:45.647143Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is DelayComplete 2026-01-10T12:30:45.647173Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2026-01-10T12:30:45.647225Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompletedOperations 2026-01-10T12:30:45.647255Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2026-01-10T12:30:45.647293Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is Executed 2026-01-10T12:30:45.647315Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2026-01-10T12:30:45.647341Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437184 has finished 2026-01-10T12:30:45.647382Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:45.647422Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:45.647453Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:45.647480Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:45.647584Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} 
Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-10T12:30:45.647649Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-10T12:30:45.647821Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316]) (priority=5 type=transaction resources={0, 97002198} resubmit=1) 2026-01-10T12:30:45.647870Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316]) to queue queue_transaction 2026-01-10T12:30:45.647922Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 97002198} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316]) from queue queue_transaction 2026-01-10T12:30:45.647958Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316]) to queue queue_transaction 2026-01-10T12:30:45.648003Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_transaction from 16.938813 to 33.877626 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316])) 2026-01-10T12:30:45.649186Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 97002198b}, Memory{0 dyn 97002198} 2026-01-10T12:30:45.649307Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) (release resources {0, 97002198}) 2026-01-10T12:30:45.649353Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 33.877626 to 16.938813 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230])) 2026-01-10T12:30:45.649433Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-10T12:30:45.649471Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2026-01-10T12:30:45.650480Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437185 restored its data 2026-01-10T12:30:46.209439Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2026-01-10T12:30:46.209528Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:30:46.209590Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2026-01-10T12:30:46.209622Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on 
unit ExecuteDataTx 2026-01-10T12:30:46.209650Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437185 to execution unit CompleteOperation 2026-01-10T12:30:46.209681Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2026-01-10T12:30:46.209895Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is DelayComplete 2026-01-10T12:30:46.209925Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2026-01-10T12:30:46.209952Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437185 to execution unit CompletedOperations 2026-01-10T12:30:46.210000Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2026-01-10T12:30:46.210033Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is Executed 2026-01-10T12:30:46.210053Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2026-01-10T12:30:46.210079Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437185 has finished 2026-01-10T12:30:46.210107Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:46.210151Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-10T12:30:46.210180Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-10T12:30:46.210203Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-10T12:30:46.210307Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-10T12:30:46.210369Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-10T12:30:46.210551Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316]) (release resources {0, 97002198}) 2026-01-10T12:30:46.210601Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 16.938813 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:371:2316])) 2026-01-10T12:30:46.229581Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2026-01-10T12:30:46.229653Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:30:46.229694Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2026-01-10T12:30:46.229747Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-10T12:30:46.229850Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 
{TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-10T12:30:46.229901Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-10T12:30:46.230166Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2026-01-10T12:30:46.230201Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:46.230228Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2026-01-10T12:30:46.230264Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-10T12:30:46.230312Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-10T12:30:46.230339Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:46.230567Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:349:2316], Recipient [3:460:2402]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-10T12:30:46.230614Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:30:46.230661Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2026-01-10T12:30:46.230748Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:238:2230], Recipient [3:460:2402]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-10T12:30:46.230774Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:30:46.230797Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2026-01-10T12:29:37.464783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:37.498670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:37.498979Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:37.506536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:37.506792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:37.507024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:37.507137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:37.507263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:37.507376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:37.507509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:37.507609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:37.507735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:37.507859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.508005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:37.508151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:37.508271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:37.540309Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:37.540673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:37.540724Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:37.540917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:37.541098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:37.541178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:37.541219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:37.541330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:37.541396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:37.541444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:37.541496Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:37.541667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:37.541725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:37.541766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:37.541802Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:37.541923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:37.541980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:37.542021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:37.542053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:37.542098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:37.542134Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:37.542163Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:37.542208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:37.542256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:37.542285Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:37.542511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:37.542612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:37.542643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:37.542758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:37.542796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.542827Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:37.542884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:37.542944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:37.542976Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:37.543019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:37.543060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:37.543095Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:37.543263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:37.543320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... odicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:22.185076Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:22.185133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-10T12:30:22.185196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-10T12:30:22.185327Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:22.185463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.185520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-10T12:30:22.185584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.186269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.186392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.186545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:22.186607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.626500s; 2026-01-10T12:30:22.186669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:22.187204Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.187430Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-10T12:30:22.187629Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-10T12:30:22.187671Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.187788Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[183] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-10T12:30:22.188407Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.188727Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 
9437184 2026-01-10T12:30:22.200966Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.201048Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-10T12:30:22.201289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::21cbc68c-ee2011f0-9b9a59b3-e3729b8f; 2026-01-10T12:30:22.201343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:22.201399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-10T12:30:22.201447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:22.201505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:22.201553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:22.201608Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:22.201688Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.625000s; 2026-01-10T12:30:22.201732Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=21cbc68c-ee2011f0-9b9a59b3-e3729b8f;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:22.201805Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:6171112:0] 2026-01-10T12:30:22.201853Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:6171112:0] 2026-01-10T12:30:22.201893Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:6171112:0] 2026-01-10T12:30:22.201929Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:6171112:0] 2026-01-10T12:30:22.201958Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2026-01-10T12:30:22.201985Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:1:3:0:6171112:0] 
2026-01-10T12:30:22.202010Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2026-01-10T12:30:22.202051Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:6171112:0] 2026-01-10T12:30:22.202080Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6171112:0] 2026-01-10T12:30:22.202105Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-10T12:30:22.202134Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:6171112:0] 2026-01-10T12:30:22.202170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2026-01-10T12:30:22.202200Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:6171112:0] 2026-01-10T12:30:22.202232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6171112:0] 2026-01-10T12:30:22.202264Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:6171112:0] 2026-01-10T12:30:22.202297Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:17:4:0:6171112:0] 2026-01-10T12:30:22.202326Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:6171112:0] 2026-01-10T12:30:22.202352Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6171112:0] 2026-01-10T12:30:22.202377Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-10T12:30:22.202402Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6171112:0] GC for channel 2 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2026-01-10T12:30:40.781146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.781200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.781836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.801804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.802218Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.802515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.853024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.872507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.873449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.875454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.875535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.875613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.879492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.879729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.879833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2157] in generation 2 2026-01-10T12:30:40.982944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.025917Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.026201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.026332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:220:2216] 2026-01-10T12:30:41.026379Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.026444Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.026504Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.026781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.026842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.027211Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.027339Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.027407Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.027451Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.027509Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.027552Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.027589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.027649Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.027709Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.027827Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.027881Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.027939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2026-01-10T12:30:41.036786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.036865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.036966Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.037221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.037282Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.037361Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.037422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.037464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.037503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.037546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.037881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.037917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.037970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.038010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.038063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.038104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.038166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.038205Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.038237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.051929Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.052032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.052070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.052175Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.052261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.053776Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:226:2222], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053841Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:225:2221], serverId# [1:226:2222], sessionId# [0:0:0] 2026-01-10T12:30:41.054090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.054149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.054302Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.054355Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:30:41.054404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.054449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.058827Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.058915Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.059180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.059229Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.059303Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.059348Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active 
planned 0 immediate 0 planned 1 2026-01-10T12:30:41.059388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:41.059441Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:30:41.059499Z node 1 :TX_DATASHARD TRACE: dat ... [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:30:44.638526Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:44.638556Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:44.638590Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-10T12:30:44.638627Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-10T12:30:44.638671Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:30:44.638699Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-10T12:30:44.638736Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-10T12:30:44.684441Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:44.684515Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:30:44.684568Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2026-01-10T12:30:44.684652Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:45.621664Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-10T12:30:45.621781Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-10T12:30:45.628216Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:303:2283], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:45.628302Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:45.628385Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:302:2282], serverId# [3:303:2283], sessionId# [0:0:0] 2026-01-10T12:30:45.926143Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 
AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2026-01-10T12:30:45.929786Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:45.929993Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:46.002618Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2026-01-10T12:30:46.002748Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-10T12:30:46.002814Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2026-01-10T12:30:46.002860Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:30:46.002911Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:30:46.002971Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:46.003054Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 9437184 2026-01-10T12:30:46.003108Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-10T12:30:46.003130Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:30:46.003151Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:30:46.003174Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BlockFailPoint 2026-01-10T12:30:46.003198Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-10T12:30:46.003220Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:30:46.003243Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:30:46.003264Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-10T12:30:46.003320Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 
ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:46.003382Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:3] at 9437184 requested 46269686 more memory 2026-01-10T12:30:46.003431Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-10T12:30:46.003560Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:46.003624Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-10T12:30:46.003694Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:46.034531Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:3] at 9437184 exceeded memory limit 50463990 and requests 403711920 more for the next try 2026-01-10T12:30:46.038970Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 3 released its data 2026-01-10T12:30:46.039080Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-10T12:30:46.039452Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:46.039495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-10T12:30:46.117249Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 3 at 9437184 restored its data 2026-01-10T12:30:46.117349Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:30:46.237101Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2026-01-10T12:30:46.237226Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:30:46.237321Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:46.237364Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:30:46.237422Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:46.237464Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit FinishPropose 2026-01-10T12:30:46.237507Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is DelayComplete 2026-01-10T12:30:46.237540Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:46.237588Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 
9437184 to execution unit CompletedOperations 2026-01-10T12:30:46.237629Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CompletedOperations 2026-01-10T12:30:46.237684Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-10T12:30:46.237713Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2026-01-10T12:30:46.237758Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 9437184 has finished 2026-01-10T12:30:46.385809Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:46.385915Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:3] at 9437184 on unit FinishPropose 2026-01-10T12:30:46.385986Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2026-01-10T12:30:46.386125Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:46.577074Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:30:46.577167Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-10T12:30:46.601488Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:236:2229], Recipient [3:238:2230]: NKikimr::TEvTablet::TEvFollowerGcApplied |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2026-01-10T12:29:42.965160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:42.997907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:42.998129Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:43.005544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:43.005813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:43.006068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:43.006207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:43.006310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:43.006413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:43.006544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:43.006665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:43.006785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:43.006924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:43.007036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:43.007152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:43.007260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:43.039191Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:43.039683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:43.039751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:43.039953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:43.040186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:43.040265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:43.040314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:43.040438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:43.040512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:43.040559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:43.040588Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:43.040782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:43.040872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:43.040926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:43.040970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:43.041072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:43.041130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:43.041187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:43.041231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:43.041284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:43.041324Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:43.041356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:43.041397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:43.041435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:43.041470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:43.041723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:43.041844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:43.041901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:43.042035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:43.042078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:43.042107Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:43.042169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:43.042232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:43.042283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:43.042337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:43.042374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:43.042405Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:43.042555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:43.042615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9190; 2026-01-10T12:30:53.119094Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-10T12:30:53.120136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=970; 2026-01-10T12:30:53.120214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=11595; 2026-01-10T12:30:53.120264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=11763; 2026-01-10T12:30:53.120332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-10T12:30:53.120430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2026-01-10T12:30:53.120482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=12734; 2026-01-10T12:30:53.120680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=129; 2026-01-10T12:30:53.120809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=84; 2026-01-10T12:30:53.121006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=150; 2026-01-10T12:30:53.121193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=143; 2026-01-10T12:30:53.123969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2711; 2026-01-10T12:30:53.127071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3018; 2026-01-10T12:30:53.127161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-10T12:30:53.127220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-10T12:30:53.127266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:30:53.127353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=50; 2026-01-10T12:30:53.127401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-10T12:30:53.127497Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2026-01-10T12:30:53.127543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-10T12:30:53.127623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=43; 2026-01-10T12:30:53.127719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2026-01-10T12:30:53.127828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=70; 2026-01-10T12:30:53.127885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31853; 2026-01-10T12:30:53.128049Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:30:53.128330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:30:53.128423Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:30:53.128505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:30:53.128558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:30:53.128717Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:53.128789Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:53.128834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:53.128884Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:53.128967Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:53.129021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:53.129071Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:53.129183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:53.129426Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.032000s; 2026-01-10T12:30:53.131469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:30:53.132615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:30:53.132689Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:30:53.132767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:30:53.132818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:30:53.132929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:30:53.133013Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:30:53.133074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:53.133125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:30:53.133217Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:30:53.133276Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:30:53.133927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.070000s; 2026-01-10T12:30:53.134011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001c3b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit_log.wzg7ojml.txt 2026-01-10T12:30:34.595537Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-10T12:30:34.602849Z: {"paths":"[/Root/users/database]","tx_id":"281474976710659","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2026-01-10T12:30:34.626071Z: {"paths":"[/Root/users/database]","tx_id":"281474976710660","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2026-01-10T12:30:34.641463Z: {"reason":"Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 
72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)","paths":"[default]","tx_id":"281474976710661","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2026-01-10T12:30:36.953537Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-10T12:30:37.081716Z: {"paths":"[/Root/users/database/.sys]","tx_id":"281474976725657","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 2026-01-10T12:30:37.082271Z: {"paths":"[/Root/users/database/.sys/auth_owners]","tx_id":"281474976725658","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.082623Z: {"paths":"[/Root/users/database/.sys/query_sessions]","tx_id":"281474976725659","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.082898Z: {"paths":"[/Root/users/database/.sys/tables]","tx_id":"281474976725660","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.083153Z: {"paths":"[/Root/users/database/.sys/pg_class]","tx_id":"281474976725661","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.083394Z: {"paths":"[/Root/users/database/.sys/partition_stats]","tx_id":"281474976725662","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.083694Z: {"paths":"[/Root/users/database/.sys/query_metrics_one_minute]","tx_id":"281474976725663","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.083903Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_hour]","tx_id":"281474976725664","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.084197Z: 
{"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_hour]","tx_id":"281474976725665","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.084952Z: {"paths":"[/Root/users/database/.sys/compile_cache_queries]","tx_id":"281474976725666","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.085247Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_minute]","tx_id":"281474976725667","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.085562Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_minute]","tx_id":"281474976725668","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.085797Z: {"paths":"[/Root/users/database/.sys/resource_pool_classifiers]","tx_id":"281474976725669","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.086069Z: {"paths":"[/Root/users/database/.sys/nodes]","tx_id":"281474976725670","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.086361Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_hour]","tx_id":"281474976725671","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.086618Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_minute]","tx_id":"281474976725672","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.086836Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_minute]","tx_id":"281474976725673","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.087134Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_hour]","tx_id":"281474976725674","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.087393Z: 
{"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_hour]","tx_id":"281474976725675","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.087622Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_hour]","tx_id":"281474976725676","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.087863Z: {"paths":"[/Root/users/database/.sys/pg_tables]","tx_id":"281474976725677","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.088107Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_minute]","tx_id":"281474976725678","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.088361Z: {"paths":"[/Root/users/database/.sys/auth_group_members]","tx_id":"281474976725679","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.088569Z: {"paths":"[/Root/users/database/.sys/auth_users]","tx_id":"281474976725680","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.088817Z: {"paths":"[/Root/users/database/.sys/resource_pools]","tx_id":"281474976725681","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.089086Z: {"paths":"[/Root/users/database/.sys/streaming_queries]","tx_id":"281474976725682","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.089319Z: {"paths":"[/Root/users/database/.sys/auth_permissions]","tx_id":"281474976725683","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.089544Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_minute]","tx_id":"281474976725684","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.089765Z: 
{"paths":"[/Root/users/database/.sys/auth_effective_permissions]","tx_id":"281474976725685","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:37.091283Z: {"paths":"[/Root/users/database/.sys/auth_groups]","tx_id":"281474976725686","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-10T12:30:38.894858Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2026-01-10T12:30:38.902319Z: {"paths":"[/Root/users/database]","tx_id":"281474976710662","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2026-01-10T12:30:38.936736Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> Secret::Validation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:07.517712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:07.626821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:07.635019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:07.635511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:07.635619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:07.948802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:07.948925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:07.955140Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048084747299 != 1768048084747302 2026-01-10T12:28:07.966895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:08.011381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:08.117377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:08.593061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:08.593826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:08.593899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:08.593949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:08.594452Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:08.609075Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:20.406679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:20.406858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:20.407339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:20.407474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:30.749034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:979:2834], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.749352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.750493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:983:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.750698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.755490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:30.934190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1091:2913], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.934325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.934819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1095:2917], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.934920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.935014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1097:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:30.940428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:31.071311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1100:2922], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-10T12:28:31.424437Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1192:2987] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:32.095193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:32.605851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.196279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.945651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:34.384747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:35.583367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.913584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-10T12:28:35.976501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:35.976591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE S ... ain>: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:13.185004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:14.147316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:29:15.911483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715727:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:29:16.298329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715730:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:41.123100Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=16; 2026-01-10T12:29:41.123386Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:29:41.123598Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:29:41.123962Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3711:4880], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3623:4880]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3711:4880].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:29:41.124582Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3705:4880], SessionActorId: [1:3623:4880], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3623:4880]. 2026-01-10T12:29:41.125955Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZWE0NGQ5NWEtYzNmMzBhM2YtZDE2NmFhMDYtYjVjZmNkYTc=, ActorId: [1:3623:4880], ActorState: ExecuteState, LegacyTraceId: 01keky4fshb8b241y5e5ymqbn7, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3706:4880] from: [1:3705:4880] trace_id# 2026-01-10T12:29:41.126224Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3706:4880] TxId: 281474976715757. Ctx: { TraceId: 01keky4fshb8b241y5e5ymqbn7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE0NGQ5NWEtYzNmMzBhM2YtZDE2NmFhMDYtYjVjZmNkYTc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:29:41.126581Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZWE0NGQ5NWEtYzNmMzBhM2YtZDE2NmFhMDYtYjVjZmNkYTc=, ActorId: [1:3623:4880], ActorState: ExecuteState, LegacyTraceId: 01keky4fshb8b241y5e5ymqbn7, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-10T12:29:41.135439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01keky4fjmdrnff38sxx5q3s1y" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZWE0NGQ5NWEtYzNmMzBhM2YtZDE2NmFhMDYtYjVjZmNkYTc=" tx_control { tx_id: "01keky4fjmdrnff38sxx5q3s1y" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-10T12:30:04.940817Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4181:5320], for# root@builtin, access# DescribeSchema 2026-01-10T12:30:04.940953Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4181:5320], for# root@builtin, access# DescribeSchema 2026-01-10T12:30:04.943177Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4178:5317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:30:04.946227Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzRlMzBiMjUtOTM5NWY0MTYtYThiNmM3NWQtMzAyMjM1ZmM=, ActorId: [1:4172:5312], ActorState: ExecuteState, LegacyTraceId: 01keky573s9z8zwx1q9yrynvms, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:30:16.695073Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:28:09.507129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:28:09.588985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:28:09.595034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:28:09.595338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:28:09.595407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:28:09.858919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:28:09.859050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:28:09.863591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048087150997 != 1768048087151000 2026-01-10T12:28:09.880737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:09.926423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:28:10.016024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:28:10.431530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:28:10.432114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:28:10.432167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:28:10.432201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:28:10.432744Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:28:10.446928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:22.141135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.141284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.141368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.142482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.142641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:28:22.147423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:28:22.166467Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2826], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-10T12:28:22.212243Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2860] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:28:22.267500Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1028:2869], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2026-01-10T12:28:22.269962Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTAyNWVkZGMtZjk2MTZjNzktZWU3ZGQxOTItNDVlZTI2Njk=, ActorId: [1:949:2816], ActorState: ExecuteState, LegacyTraceId: 01keky22qn5b7g5k5yf7t8rw3g, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 column: 20 } message: "mismatched input \'-\' expecting \'(\'" end_position { row: 1 column: 20 } severity: 1 } tx_id# trace_id# REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-10T12:28:32.661482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:33.626456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:34.043009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:34.676047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:35.480672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:35.915291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:28:36.502545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:28:37.363167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-10T12:28:38.007917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:28:38.007989Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:28:39.265047Z node 1 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MWJhZDNiMjYtNDNjMjE4MDAtYmI1ODg0OGMtOTgyYThjNWI=, ActorId: [1:1046:2879], ActorState: ExecuteState, LegacyTraceId: 01keky2csfey27zbrz376qww2g, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 not found for alter" severity: 1 } } trace_id# REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:14.610117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB ... t called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-10T12:29:18.135759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715731:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2026-01-10T12:29:31.088371Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzM1ZGRlYzItOWFkY2RlYzQtMmZhYzk2MzUtMTJmZmQ4YTI=, ActorId: [1:3290:4624], ActorState: ExecuteState, LegacyTraceId: 01keky45fxcfhj9bg9mwajpswb, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET_ACCESS\"" severity: 1 issues { message: "preparation problem: used in access secret secret2 not found" severity: 1 } } trace_id# REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Executing operation with object "SECRET_ACCESS"
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-10T12:29:43.463366Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=15; 2026-01-10T12:29:43.463622Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:29:43.463808Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-10T12:29:43.464144Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3651:4839], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3563:4839]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3651:4839].{
: Error: Conflict with existing key., code: 2012 } 2026-01-10T12:29:43.464698Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3645:4839], SessionActorId: [1:3563:4839], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3563:4839]. 2026-01-10T12:29:43.465170Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NjkyMTc3Ny1kODM5YTZmZS1kODZiNTg0NS1mNzM2YjgxMA==, ActorId: [1:3563:4839], ActorState: ExecuteState, LegacyTraceId: 01keky4j222wv2ram5fwnrq9ea, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3646:4839] from: [1:3645:4839] trace_id# 2026-01-10T12:29:43.465423Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [1:3646:4839] TxId: 281474976715757. Ctx: { TraceId: 01keky4j222wv2ram5fwnrq9ea, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjkyMTc3Ny1kODM5YTZmZS1kODZiNTg0NS1mNzM2YjgxMA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-10T12:29:43.465828Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NjkyMTc3Ny1kODM5YTZmZS1kODZiNTg0NS1mNzM2YjgxMA==, ActorId: [1:3563:4839], ActorState: ExecuteState, LegacyTraceId: 01keky4j222wv2ram5fwnrq9ea, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-10T12:29:43.473858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01keky4htk0j7bycc9qz31tqvm" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NjkyMTc3Ny1kODM5YTZmZS1kODZiNTg0NS1mNzM2YjgxMA==" tx_control { tx_id: "01keky4htk0j7bycc9qz31tqvm" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-10T12:29:55.931766Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MWIzMmQ4YTMtODJmYTI1YTUtNGM4NTg4MWItY2E2YzgxY2Y=, ActorId: [1:3833:5050], ActorState: ExecuteState, LegacyTraceId: 01keky4xpwcjac9dp25gbdbb02, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 using in access for test@test1" severity: 1 } } trace_id# REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-10T12:30:07.473438Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4122:5281], for# root@builtin, access# DescribeSchema 2026-01-10T12:30:07.473563Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4122:5281], for# root@builtin, access# DescribeSchema 2026-01-10T12:30:07.475622Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4119:5278], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:30:07.478791Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODg3NjkyMjUtYTcxNzA0ZDYtZTRiNDJiNzMtMjBhMjc3ZTQ=, ActorId: [1:4115:5275], ActorState: ExecuteState, LegacyTraceId: 01keky59jz2nn7tp9dwzbmgbf4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:30:19.158940Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator 2026-01-10T12:30:20.024327Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NjdmNGFhOTItMzAzNWU2NzgtMjIxZTNlYWEtYmI5NDVi, ActorId: [1:4351:5463], ActorState: ExecuteState, LegacyTraceId: 01keky5n0n5wd82cqeycyzrzbr, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot CREATE objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-10T12:30:32.548758Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2JlZGI4NjktNzllM2NjZTctNTExNzRkMDUtOTI1ZjczODM=, ActorId: [1:4702:5743], ActorState: ExecuteState, LegacyTraceId: 01keky614rd7ezzb6y835yk2nb, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot UPSERT objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001d35/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.9_ddx8j8.txt 2026-01-10T12:30:44.845463Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> TExportToS3Tests::UidAsIdempotencyKey ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001d34/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit_log.f3_68a6m.txt >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> TExportToS3Tests::UserSID >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::TablePermissions >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> TExportToS3Tests::UserSID [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> TExportToS3Tests::TransferExportNoConnString >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::TopicExport >> TExportToS3Tests::ExportStartTime [GOOD] >> TOlap::StoreStatsQuota [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> TExportToS3Tests::ExportPartitioningSettings >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> TExportToS3Tests::TransferExportNoConnString [GOOD] >> TExportToS3Tests::TopicExport [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> TExportToS3Tests::TopicWithPermissionsExport >> TExportToS3Tests::TransferExportWithConnString >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TOlapNaming::AlterColumnStoreFailed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2026-01-10T12:30:40.783268Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.784133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.784185Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.792788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.794131Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.794438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.849133Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.866628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.872501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.877755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.877860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.877926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.878330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.878442Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.878538Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:30:40.984893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.020949Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.021152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.021266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:30:41.021326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 
2026-01-10T12:30:41.021365Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.021404Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.021632Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.021703Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.022069Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.022198Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.022278Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.022318Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.022372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.022411Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.022461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.022496Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.022533Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.022633Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.022676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.022726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:30:41.034499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.034581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.034706Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.034899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.034972Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.035036Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.035087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.035125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.035161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.035196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.035528Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.035593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.035645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.035679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.035752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.035784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.035821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.035862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.035894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.049392Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.049465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.049501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.049579Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.049654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.053928Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:224:2220], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.054003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.054087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:223:2219], serverId# [1:224:2220], sessionId# [0:0:0] 2026-01-10T12:30:41.054203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 
2026-01-10T12:30:41.054255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.054422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.054477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:30:41.054521Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.054559Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.058969Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.059040Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.059259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.059308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.059385Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.059438Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.059478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:41.059524Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:30:41.059570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
sion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:31:15.667729Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2026-01-10T12:31:15.667788Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:31:15.667870Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:31:15.667906Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:31:15.667937Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit FinishPropose 2026-01-10T12:31:15.667969Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2026-01-10T12:31:15.668007Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is DelayComplete 2026-01-10T12:31:15.668033Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2026-01-10T12:31:15.668064Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit CompletedOperations 2026-01-10T12:31:15.673826Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2026-01-10T12:31:15.673930Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is Executed 2026-01-10T12:31:15.673966Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2026-01-10T12:31:15.674003Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1002] at 9437184 has finished 2026-01-10T12:31:15.696607Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:31:15.696681Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2026-01-10T12:31:15.696725Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-10T12:31:15.696802Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2026-01-10T12:31:15.707964Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-10T12:31:15.708018Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-10T12:31:15.709285Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4550:6467], Recipient 
[3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:15.709336Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:15.709373Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4549:6466], serverId# [3:4550:6467], sessionId# [0:0:0] 2026-01-10T12:31:15.709794Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2026-01-10T12:31:15.709833Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:31:15.709933Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:15.710482Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2026-01-10T12:31:15.710534Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-10T12:31:15.710567Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2026-01-10T12:31:15.710625Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:31:15.710658Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:31:15.710701Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:31:15.710784Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1003] at 9437184 2026-01-10T12:31:15.710825Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-10T12:31:15.710853Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:31:15.710879Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:31:15.710908Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BlockFailPoint 2026-01-10T12:31:15.710934Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-10T12:31:15.710956Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:31:15.710978Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:31:15.711000Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:15.711043Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:31:15.711421Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2026-01-10T12:31:15.711479Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:31:15.711532Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:31:15.711554Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:31:15.711577Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit FinishPropose 2026-01-10T12:31:15.711600Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2026-01-10T12:31:15.711626Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is DelayComplete 2026-01-10T12:31:15.711648Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2026-01-10T12:31:15.711667Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit CompletedOperations 2026-01-10T12:31:15.711686Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2026-01-10T12:31:15.711714Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-10T12:31:15.711730Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2026-01-10T12:31:15.711751Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1003] at 9437184 has finished 2026-01-10T12:31:15.777911Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-10T12:31:15.777976Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-10T12:31:15.780817Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:31:15.780886Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2026-01-10T12:31:15.780930Z node 3 :TX_DATASHARD 
TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2026-01-10T12:31:15.781011Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:31:15.785071Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:236:2229], Recipient [3:238:2230]: NKikimr::TEvTablet::TEvFollowerGcApplied .2026-01-10T12:31:15.788691Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4564:6480], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:15.788772Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:15.788830Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4563:6479], serverId# [3:4564:6480], sessionId# [0:0:0] 2026-01-10T12:31:15.789313Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553160, Sender [3:4562:6478], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1718 LastUpdateTime: 1718 KeyAccessSample { } } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::TopicsExport >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> TExportToS3Tests::TransferExportWithConnString [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:27:44.907843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:27:44.907950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:27:44.908069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:27:44.908143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:27:44.908194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:27:44.908262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:27:44.908361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:27:44.909332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:27:44.910346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:27:45.007223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:27:45.007299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:45.023999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:27:45.024151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:27:45.024372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:27:45.034003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:27:45.040500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:27:45.069135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:27:45.074233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:27:45.081143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.082990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:27:45.091299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:27:45.091528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:27:45.091585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:27:45.091640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-10T12:27:45.092723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:27:45.289400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.290962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:27:45.291751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... nStep5000039 State->FrontStep: 5000039 2026-01-10T12:31:17.180436Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:17.180517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:31:17.180795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-10T12:31:17.180992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:17.181042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-10T12:31:17.181093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-10T12:31:17.181479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.181550Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-10T12:31:17.181618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-10T12:31:17.182488Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:31:17.182596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:31:17.182638Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:31:17.182679Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:31:17.182736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-10T12:31:17.183466Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:31:17.183549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:31:17.183578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:31:17.183608Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-10T12:31:17.183639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-10T12:31:17.183714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-10T12:31:17.186602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-10T12:31:17.186699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-10T12:31:17.186783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-10T12:31:17.187061Z node 2 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-10T12:31:17.187169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.187292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-10T12:31:17.188287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:31:17.188417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-10T12:31:17.189909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.207856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-10T12:31:17.207942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-10T12:31:17.208124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 
2026-01-10T12:31:17.217071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.217291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.217340Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-10T12:31:17.217487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:31:17.217532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:31:17.217582Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-10T12:31:17.217625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:31:17.217666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-10T12:31:17.217749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:749:2725] message: TxId: 102 2026-01-10T12:31:17.217810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-10T12:31:17.217856Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:31:17.217893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:31:17.218060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-10T12:31:17.220730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:31:17.220801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:813:2781] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-10T12:31:17.224616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:31:17.224898Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2026-01-10T12:31:17.225244Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-10T12:31:17.233605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name 
for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:31:17.233902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-10T12:31:17.234379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-10T12:31:17.234431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-10T12:31:17.234913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-10T12:31:17.235036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-10T12:31:17.235078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:849:2817] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::TransferExportWithConsumer |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> TExportToS3Tests::TopicsExport [GOOD] >> TExportToS3Tests::TopicsWithPermissionsExport >> TExportToS3Tests::TransferExportWithConsumer [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] >> TExportToS3Tests::EncryptedExport >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs [GOOD] >> TSchemeShardExportToFsTests::FsExportWithMultipleTables >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> TExportToS3Tests::EncryptedExport [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:31:08.071725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:31:08.071845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:08.071885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:31:08.071927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:31:08.071976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:31:08.072007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:31:08.072105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:08.072186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:31:08.073059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:31:08.073295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:31:08.187274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:31:08.187342Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:31:08.204485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:31:08.204598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:31:08.204797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:31:08.214490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:31:08.214745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:31:08.215502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:31:08.215771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-10T12:31:08.219866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:08.220178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:31:08.221495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:08.221560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:08.221626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:31:08.221672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:31:08.221718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:31:08.221965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:31:08.397423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.398987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:08.399658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 6-01-10T12:31:22.470558Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-10T12:31:22.470600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:31:22.470672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-10T12:31:22.474136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-10T12:31:22.474207Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-10T12:31:22.474256Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-10T12:31:22.475260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-10T12:31:22.475412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:31:22.475752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000044 FAKE_COORDINATOR: advance: minStep5000044 State->FrontStep: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000044 2026-01-10T12:31:22.476631Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 5000044, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:31:22.476778Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773230 } } Step: 5000044 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:31:22.476833Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000044, at schemeshard: 72057594046678944 2026-01-10T12:31:22.476957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-10T12:31:22.477019Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-10T12:31:22.477054Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:31:22.477098Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-10T12:31:22.477147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:31:22.477227Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-10T12:31:22.477317Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:31:22.477356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-10T12:31:22.477408Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-10T12:31:22.477475Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-01-10T12:31:22.477519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-10T12:31:22.477587Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-10T12:31:22.477628Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-10T12:31:22.477666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 20 2026-01-10T12:31:22.477701Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-10T12:31:22.478496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 
2026-01-10T12:31:22.480111Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:22.480160Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:31:22.480370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-10T12:31:22.480516Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:22.480585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-10T12:31:22.480636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 43 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-10T12:31:22.481508Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.481608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.481647Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-10T12:31:22.481685Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 20 2026-01-10T12:31:22.481725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 8 2026-01-10T12:31:22.482283Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.482379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.482408Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-10T12:31:22.482450Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-10T12:31:22.482487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-10T12:31:22.482565Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-10T12:31:22.482601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:130:2154] 2026-01-10T12:31:22.483499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:31:22.483551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-10T12:31:22.483628Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-10T12:31:22.490689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.491459Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:22.491571Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-10T12:31:22.491668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-10T12:31:22.492320Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:31:22.494165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:31:22.494218Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1501:3323] TestWaitNotification: OK eventTxId 106 |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2026-01-10T12:29:43.944496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:43.977144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:43.977390Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:43.985015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:43.985276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:43.985521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:43.985639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:43.985744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:43.985861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:43.986010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:43.986128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:43.986265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:43.986411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:43.986521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:43.986644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:43.986752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:44.019587Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-10T12:29:44.019976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:44.020031Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:44.020263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:44.020478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:44.020547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:44.020596Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:44.020716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:44.020789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:44.020865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:44.020907Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:44.021112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:44.021184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:44.021231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:44.021276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:44.021381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:44.021443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:44.021492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:44.021525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:44.021570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:44.021605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:44.021636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:44.021689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:44.021726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:44.021752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:44.022017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:44.022126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:44.022161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:44.022283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:44.022323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:44.022352Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:44.022416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:44.022484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:44.022520Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:44.022568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:44.022611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:44.022640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:44.022838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:44.022907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=6764; 2026-01-10T12:31:21.744607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=16; 2026-01-10T12:31:21.745950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1265; 2026-01-10T12:31:21.746053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8552; 2026-01-10T12:31:21.746115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8741; 2026-01-10T12:31:21.746202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-10T12:31:21.746329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=67; 2026-01-10T12:31:21.746401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9763; 2026-01-10T12:31:21.746629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=156; 
2026-01-10T12:31:21.746843Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=153; 2026-01-10T12:31:21.747128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=215; 2026-01-10T12:31:21.747377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=190; 2026-01-10T12:31:21.748337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=882; 2026-01-10T12:31:21.749168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=758; 2026-01-10T12:31:21.749228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-10T12:31:21.749271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-10T12:31:21.749306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-10T12:31:21.749393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-10T12:31:21.749438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-10T12:31:21.749527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2026-01-10T12:31:21.749567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-10T12:31:21.749628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2026-01-10T12:31:21.749709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2026-01-10T12:31:21.749786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49; 2026-01-10T12:31:21.749825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25038; 2026-01-10T12:31:21.750001Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} 
compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:31:21.750167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:31:21.750266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:31:21.750385Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:31:21.750454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:31:21.750665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:31:21.750741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:31:21.750791Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:31:21.750850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:31:21.750933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:31:21.750999Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.751060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.751224Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:31:21.751488Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.167000s; 2026-01-10T12:31:21.754165Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:31:21.754353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:31:21.754417Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:31:21.754500Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:31:21.754541Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:31:21.754615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:31:21.754712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:31:21.754782Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.754834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.754931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-10T12:31:21.755143Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:31:21.755564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.181000s; 2026-01-10T12:31:21.755626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2026-01-10T12:29:36.175874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:36.210343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:36.210592Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:36.217207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:36.217426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:36.217612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:36.217706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:36.217779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:36.217874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:36.217962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:36.218053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:36.218157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:36.218278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.218374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:36.218468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:36.218557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:36.239026Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-10T12:29:36.239311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:36.239360Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:36.239526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.239677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:36.239757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:36.239812Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:36.239918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:36.239979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:36.240028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:36.240054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:36.240215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:36.240263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:36.240293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:36.240324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:36.240394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:36.240459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:36.240493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:36.240528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:36.240567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:36.240591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:36.240616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:36.240662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:36.240706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:36.240726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:36.240888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:36.240983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:36.241008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:36.241100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:36.241125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.241144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:36.241185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:36.241231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:36.241253Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:36.241282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:36.241318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:36.241344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:36.241515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:36.241560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;memory_size=286;data_size=280;sum=26280;count=92;size_of_portion=192; 2026-01-10T12:31:21.645400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5622; 2026-01-10T12:31:21.645511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-10T12:31:21.646687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1110; 2026-01-10T12:31:21.646777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7175; 2026-01-10T12:31:21.646834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7348; 2026-01-10T12:31:21.646911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-10T12:31:21.647015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=59; 2026-01-10T12:31:21.647062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8166; 2026-01-10T12:31:21.647285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=146; 2026-01-10T12:31:21.647474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=114; 2026-01-10T12:31:21.647671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=145; 2026-01-10T12:31:21.647846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=133; 2026-01-10T12:31:21.648567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=669; 2026-01-10T12:31:21.649606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=974; 2026-01-10T12:31:21.649688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-10T12:31:21.649755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=20; 2026-01-10T12:31:21.649803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-10T12:31:21.649886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-01-10T12:31:21.649928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-10T12:31:21.650024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=63; 2026-01-10T12:31:21.650067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-10T12:31:21.650139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2026-01-10T12:31:21.650241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-01-10T12:31:21.650373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=89; 2026-01-10T12:31:21.650430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25354; 2026-01-10T12:31:21.650618Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-10T12:31:21.650762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-10T12:31:21.650855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-10T12:31:21.650944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-10T12:31:21.651024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-10T12:31:21.651182Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:31:21.651261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-10T12:31:21.651321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:31:21.651387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:31:21.651485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:31:21.651545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.651592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.651702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:31:21.651935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.012000s; 2026-01-10T12:31:21.654037Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-10T12:31:21.654194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-10T12:31:21.654264Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-10T12:31:21.654370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-10T12:31:21.654459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-10T12:31:21.654549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-10T12:31:21.654624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.654688Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-10T12:31:21.654792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-10T12:31:21.654875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-10T12:31:21.656092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.107000s; 2026-01-10T12:31:21.656171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-10T12:29:23.979439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:24.011930Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:24.012190Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:24.019584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:24.019874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:24.020118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:24.020247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:24.020351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:24.020464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:24.020571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:24.020684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:24.020812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:24.020920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.021035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:24.021168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:24.021288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:24.052825Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:24.053205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:24.053259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:24.053472Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:24.053633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:24.053707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:24.053755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:24.053894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:24.053959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:24.054012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:24.054043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:24.054241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:24.054309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:24.054355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:24.054394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:24.054499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:24.054560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:24.054605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:24.054634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:24.054684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:24.054794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:24.054845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:24.054894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:24.054934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:24.054962Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:24.055212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:24.055278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:24.055322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:24.055449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:24.055494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.055520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:24.055573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:24.055649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:24.055685Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:24.055727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:24.055763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:24.055795Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:24.055924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:24.055981Z node 1 :TX_COLUMNSHARD WAR ... ;finished=1; 2026-01-10T12:31:21.681144Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:31:21.681182Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:31:21.681733Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:31:21.681908Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.681940Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:31:21.682093Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-10T12:31:21.682168Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-10T12:31:21.682417Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-10T12:31:21.682576Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.682744Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.682885Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.683138Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 
2026-01-10T12:31:21.683303Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.683428Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.683679Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2026-01-10T12:31:21.684248Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.009},{"events":["f_ack"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":118178672,"name":"_full_task","f":118178672,"d_finished":0,"c":0,"l":118190979,"d":12307},"events":[{"name":"bootstrap","f":118178926,"d_finished":1359,"c":1,"l":118180285,"d":1359},{"a":118190355,"name":"ack","f":118188945,"d_finished":1198,"c":1,"l":118190143,"d":1822},{"a":118190344,"name":"processing","f":118180440,"d_finished":3278,"c":3,"l":118190145,"d":3913},{"name":"ProduceResults","f":118179898,"d_finished":2094,"c":6,"l":118190682,"d":2094},{"a":118190686,"name":"Finish","f":118190686,"d_finished":0,"c":0,"l":118190979,"d":293},{"name":"task_result","f":118180453,"d_finished":2019,"c":2,"l":118188439,"d":2019}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.684333Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:31:21.684859Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.009},{"events":["f_ack"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":118178672,"name":"_full_task","f":118178672,"d_finished":0,"c":0,"l":118191614,"d":12942},"events":[{"name":"bootstrap","f":118178926,"d_finished":1359,"c":1,"l":118180285,"d":1359},{"a":118190355,"name":"ack","f":118188945,"d_finished":1198,"c":1,"l":118190143,"d":2457},{"a":118190344,"name":"processing","f":118180440,"d_finished":3278,"c":3,"l":118190145,"d":4548},{"name":"ProduceResults","f":118179898,"d_finished":2094,"c":6,"l":118190682,"d":2094},{"a":118190686,"name":"Finish","f":118190686,"d_finished":0,"c":0,"l":118191614,"d":928},{"name":"task_result","f":118180453,"d_finished":2019,"c":2,"l":118188439,"d":2019}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:21.684930Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:31:21.669038Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-10T12:31:21.684961Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:31:21.685102Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:31:09.466017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:31:09.466133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:09.466171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:31:09.466207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:31:09.466242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:31:09.466289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:31:09.466363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:09.466450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:31:09.467329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:31:09.467655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:31:09.557609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:31:09.557667Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:31:09.572926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:31:09.573050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:31:09.573257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:31:09.581724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:31:09.581978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:31:09.582778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:31:09.583019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:31:09.585316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:09.585527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:31:09.586685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:09.586732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:09.586803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:31:09.586854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:31:09.586932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:31:09.587209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:31:09.736953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.737864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.737995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-10T12:31:09.738236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:09.738946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2026-01-10T12:31:24.632590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72057594037968897 2026-01-10T12:31:24.632655Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-10T12:31:24.633725Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-10T12:31:24.634009Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409548 2026-01-10T12:31:24.634149Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-10T12:31:24.634190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/2, is published: true 2026-01-10T12:31:24.634235Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-10T12:31:24.634592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle 
TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-10T12:31:24.634660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-10T12:31:24.634866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-10T12:31:24.634939Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-10T12:31:24.635045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-10T12:31:24.635177Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:1 2 -> 3 2026-01-10T12:31:24.636914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2026-01-10T12:31:24.637236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1813: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 281474976710758, partId# 0, prevPartId# 1, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "1" CopyFromTable: "/MyRoot/Table2" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true, newTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "0" CopyFromTable: "/MyRoot/Table1" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true 2026-01-10T12:31:24.637305Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-10T12:31:24.637543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:24.637810Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 
2026-01-10T12:31:24.641656Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409549 2026-01-10T12:31:24.641820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-10T12:31:24.641886Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-10T12:31:24.642087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-10T12:31:24.642156Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-10T12:31:24.642240Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-10T12:31:24.642341Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:0 2 -> 3 2026-01-10T12:31:24.647159Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:24.647375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:24.652529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-10T12:31:24.653101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-10T12:31:24.653651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-10T12:31:24.654187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-10T12:31:24.654275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:1 ProgressState at tablet# 72057594046678944 2026-01-10T12:31:24.654387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:1 Propose modify scheme on dstDatashard# 72075186233409548 idx# 72057594046678944:4 srcDatashard# 72075186233409547 idx# 72057594046678944:2 operationId# 281474976710758:1 seqNo# 2:3 at tablet# 72057594046678944 2026-01-10T12:31:24.655991Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 
2026-01-10T12:31:24.656051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2026-01-10T12:31:24.656134Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409549 idx# 72057594046678944:3 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:4 at tablet# 72057594046678944 2026-01-10T12:31:24.662912Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:4 msg type: 269549568 2026-01-10T12:31:24.663088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-10T12:31:24.663205Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409548 2026-01-10T12:31:24.663248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409547 2026-01-10T12:31:24.664240Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:3 msg type: 269549568 2026-01-10T12:31:24.664371Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-10T12:31:24.664455Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409549 2026-01-10T12:31:24.664494Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:113:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:113:2143] Leader for TabletID 9437184 is [1:135:2157] sender: [1:138:2057] recipient: [1:113:2143] 2026-01-10T12:30:40.773341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.773418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.774078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 
2026-01-10T12:30:40.798606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.798982Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.799258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.809130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.882835Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.882956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.884936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.885045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.885103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.885557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.886040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.886144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:207:2157] in generation 2 Leader for TabletID 9437184 is [1:135:2157] sender: [1:215:2057] recipient: [1:14:2061] 2026-01-10T12:30:40.979977Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.012527Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.013879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.014040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-10T12:30:41.014120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.014189Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.014250Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.014549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.014614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.015879Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.015994Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.016048Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.016121Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.016242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.016287Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.016378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.016427Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.016477Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.016593Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.016662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.016719Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2026-01-10T12:30:41.030185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.030245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.030333Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.030510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.030571Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.030621Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.030658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.030689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.030729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.030757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.031036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.031062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.031087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.031112Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.031168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.031188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.031214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.031241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.031268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.049461Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.049535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.049571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.049654Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.049729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.053592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-10T12:30:41.053814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.053842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.054020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.054060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-10T12:30:41.054093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.054134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.057913Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.057991Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.058275Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058412Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.058462Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.058498Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... rd_impl.h:3153: StateWork, received event# 269877761, Sender [23:297:2278], Recipient [23:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:25.788966Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:25.789033Z node 23 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [23:296:2277], serverId# [23:297:2278], sessionId# [0:0:0] 2026-01-10T12:31:25.789332Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [23:103:2137], Recipient [23:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 98784249945 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? 
?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-10T12:31:25.789382Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:31:25.789509Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:25.790693Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-10T12:31:25.790795Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:31:25.790854Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-10T12:31:25.790905Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:31:25.790955Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:31:25.791007Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:25.791092Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-10T12:31:25.791152Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:31:25.791183Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:31:25.791211Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:31:25.799893Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-01-10T12:31:25.799981Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:31:25.800030Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:31:25.800070Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:31:25.800130Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:25.800208Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:25.800292Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:2] at 9437184 requested 132390 more memory 2026-01-10T12:31:25.800353Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-10T12:31:25.800799Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:25.800863Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:25.800933Z node 23 
:TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:25.802150Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 132518 and requests 1060144 more for the next try 2026-01-10T12:31:25.802383Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-10T12:31:25.802470Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-10T12:31:25.802772Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:25.802818Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:25.803871Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-10T12:31:25.803950Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:25.808777Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 1192662 and requests 9541296 more for the next try 2026-01-10T12:31:25.808973Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-10T12:31:25.809028Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-10T12:31:25.809315Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:25.809356Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:25.810104Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-10T12:31:25.810164Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:25.810743Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 10733958 and requests 85871664 more for the next try 2026-01-10T12:31:25.810876Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-10T12:31:25.810918Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-10T12:31:25.811136Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:31:25.811174Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-10T12:31:25.811804Z node 23 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-10T12:31:25.811858Z node 23 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-10T12:31:26.153084Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed 
operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-10T12:31:26.153222Z node 23 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:31:26.153337Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:31:26.153382Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:31:26.153445Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-10T12:31:26.153503Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:31:26.153639Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:31:26.153682Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-10T12:31:26.153750Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-10T12:31:26.153800Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-10T12:31:26.153870Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-10T12:31:26.153910Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-10T12:31:26.153968Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-10T12:31:26.174055Z node 23 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:31:26.174146Z node 23 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-10T12:31:26.174218Z node 23 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-10T12:31:26.174333Z node 23 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:31:26.175644Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [23:302:2283], Recipient [23:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:26.175727Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:31:26.175787Z node 23 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [23:301:2282], serverId# [23:302:2283], sessionId# [0:0:0] 2026-01-10T12:31:26.176003Z node 23 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830214, Sender [23:300:2281], Recipient [23:238:2230]: NKikimrTabletBase.TEvGetCounters |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> 
TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:31:11.600175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:31:11.600285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:11.600314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:31:11.600340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:31:11.600369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:31:11.600397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:31:11.600462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:31:11.600552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:31:11.601325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:31:11.601644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:31:11.698613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:31:11.698675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:31:11.715607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:31:11.715732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:31:11.715916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:31:11.724119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:31:11.724352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:31:11.725075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:31:11.725304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:31:11.727494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:11.727692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:31:11.728827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:11.728893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:11.728971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:31:11.729013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:31:11.729067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:31:11.729392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:31:11.882972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.883965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.884972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.885082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.885183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.885258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:31:11.885346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
operation id: 281474976710761:0 2026-01-10T12:31:26.950916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-10T12:31:26.951005Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:31:26.951061Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-10T12:31:26.951118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-10T12:31:26.951172Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-10T12:31:26.952885Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.953028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.958081Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:31:26.958160Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:31:26.958387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-10T12:31:26.958554Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:31:26.958628Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-10T12:31:26.958698Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2211], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 41 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-10T12:31:26.959947Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.960068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.960365Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 
2026-01-10T12:31:26.960439Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-10T12:31:26.960526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-10T12:31:26.961702Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.961826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.961875Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-10T12:31:26.961916Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-10T12:31:26.961958Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-10T12:31:26.962069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-10T12:31:26.962137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-10T12:31:26.964890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.966458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-10T12:31:26.966582Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-10T12:31:26.966650Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-10T12:31:26.969672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:31:26.969747Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:945:2893] TestWaitNotification: OK eventTxId 102 2026-01-10T12:31:26.970350Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2026-01-10T12:31:26.970665Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 342us result status StatusSuccess 2026-01-10T12:31:26.971389Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 39 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 13517, MsgBus: 15116 2026-01-10T12:27:11.898909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708473000276262:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:11.898961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:12.147501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:12.186439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:12.186593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:12.249031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:12.257061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:12.372795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:12.372816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:12.372825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:12.372936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:12.384325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:12.896608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:12.913066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:12.975753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:15.307917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708490180146631:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.308043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.308426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708490180146643:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.308488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708490180146644:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.308761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:15.312987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:15.327457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708490180146647:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-10T12:27:15.393820Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708490180146698:2761] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:15.813132Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2026-01-10T12:27:15.813185Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2026-01-10T12:27:15.827007Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7593708490180146739:2362], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01keky01f97tjtdpy89846ewqx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZWIwNzMzNTMtZmIxYTM2M2MtMzE1YWZkMDItOTkyZjRiNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-10T12:27:15.809285Z }, code: 2029 }. 2026-01-10T12:27:15.828383Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7593708490180146740:2363], TxId: 281474976710661, task: 2. Ctx: { CheckpointId : . TraceId : 01keky01f97tjtdpy89846ewqx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZWIwNzMzNTMtZmIxYTM2M2MtMzE1YWZkMDItOTkyZjRiNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7593708490180146728:2347], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-10T12:27:15.833136Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZWIwNzMzNTMtZmIxYTM2M2MtMzE1YWZkMDItOTkyZjRiNzQ=, ActorId: [1:7593708490180146628:2347], ActorState: ExecuteState, LegacyTraceId: 01keky01f97tjtdpy89846ewqx, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-10T12:27:15.809285Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-tjszgfszoq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-10T12:27:15.809285Z } , code: 2029 Trying to start YDB, gRPC: 29611, MsgBus: 16886 2026-01-10T12:27:16.729205Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593708494958600706:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:16.729260Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:16.775725Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:16.887105Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:16.916643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:16.916745Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:16.947516Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:17.016979Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:17.017007Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:17.017016Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:17.017097Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:17.026036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:17.471632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:17.478923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:27:17.490061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... liedId : . 
RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7593709494631961612:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-10T12:31:09.047376Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky75943mbd76khavb7n8c7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 462ms" severity: 1 }{ message: "Cancelling after 464ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:09.528501Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709494631961648:2526] TxId: 281474976711052. Ctx: { TraceId: 01keky75r7dyv6smrsd41yqzm9, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 463ms } {
: Error: Cancelling after 464ms during execution } ] trace_id# 2026-01-10T12:31:09.528887Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky75r7dyv6smrsd41yqzm9, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 463ms" severity: 1 }{ message: "Cancelling after 464ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:10.018217Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky767g6nzebm4vrrghhnfy, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 464ms" severity: 1 }{ message: "Cancelling after 465ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:11.662552Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky77tr5419f26v1cg5w906, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 468ms" severity: 1 }{ message: "Cancelling after 468ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:12.143703Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky789w7eht3r6wyfe2daem, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 469ms" severity: 1 }{ message: "Cancelling after 466ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:13.568527Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709511811831108:2526] TxId: 281474976711064. Ctx: { TraceId: 01keky79p506065qg2gcdvv2f9, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 472ms } {
: Error: Cancelling after 474ms during execution } ] trace_id# 2026-01-10T12:31:13.568693Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7593709511811831125:5749], TxId: 281474976711064, task: 9. Ctx: { CheckpointId : . TraceId : 01keky79p506065qg2gcdvv2f9. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7593709511811831108:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-10T12:31:13.570478Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky79p506065qg2gcdvv2f9, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 472ms" severity: 1 }{ message: "Cancelling after 474ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:14.496326Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7ak740y7t78t22j8yv8y, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 474ms" severity: 1 }{ message: "Cancelling after 471ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:14.993141Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709516106798499:2526] TxId: 281474976711068. Ctx: { TraceId: 01keky7b2h2kfzm58a41zy55sy, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 475ms } {
: Error: Cancelling after 476ms during execution } ] trace_id# 2026-01-10T12:31:14.994446Z node 5 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:2862} ActorId: [5:7593709516106798499:2526] TxId: 281474976711068. Ctx: { TraceId: 01keky7b2h2kfzm58a41zy55sy, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Unexpected event while waiting for shutdown event_type# NYql::NDq::TEvDqCompute::TEvResumeExecution trace_id# 2026-01-10T12:31:14.994828Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7b2h2kfzm58a41zy55sy, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 475ms" severity: 1 }{ message: "Cancelling after 476ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:15.591658Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709520401765831:2526] TxId: 281474976711070. Ctx: { TraceId: 01keky7bn7bf182p2738aq6z7g, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 476ms } {
: Error: Cancelling after 479ms during execution } ] trace_id# 2026-01-10T12:31:15.592059Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7bn7bf182p2738aq6z7g, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 476ms" severity: 1 }{ message: "Cancelling after 479ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:16.090865Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7c4x1mk8znaqs5y40p83, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 477ms" severity: 1 }{ message: "Cancelling after 477ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:17.217243Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709528991700562:2526] TxId: 281474976711076. Ctx: { TraceId: 01keky7d7w0xhwqx6497ftbqk0, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 480ms } {
: Error: Cancelling after 484ms during execution } ] trace_id# 2026-01-10T12:31:17.217422Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7593709528991700576:5835], TxId: 281474976711076, task: 8. Ctx: { CheckpointId : . TraceId : 01keky7d7w0xhwqx6497ftbqk0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7593709528991700562:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-10T12:31:17.217478Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7593709528991700577:5836], TxId: 281474976711076, task: 9. Ctx: { CheckpointId : . TraceId : 01keky7d7w0xhwqx6497ftbqk0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7593709528991700562:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-10T12:31:17.218074Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7d7w0xhwqx6497ftbqk0, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 480ms" severity: 1 }{ message: "Cancelling after 484ms during execution" severity: 1 } trace_id# 2026-01-10T12:31:20.524724Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7gf582xndnqqkqh3bjg7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 487ms" severity: 1 }{ message: "Cancelling after 486ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:21.108634Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7h193gtkfagpzqzftvds, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 489ms" severity: 1 }{ message: "Cancelling after 490ms during compilation" severity: 1 } trace_id# 2026-01-10T12:31:23.012590Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTY4MGY0ZGYtOWI0OTVkNDUtYWU0MTg0MTUtNzkxMDJkMDA=, ActorId: [5:7593709099494961251:2526], ActorState: ExecuteState, LegacyTraceId: 01keky7jwj34zrtvkygmsbp1nq, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 494ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } trace_id# |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |96.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-10T12:29:28.779530Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-10T12:29:28.805070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-10T12:29:28.805300Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-10T12:29:28.812618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-10T12:29:28.812820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-10T12:29:28.813036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-10T12:29:28.813199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-10T12:29:28.813304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-10T12:29:28.813371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-10T12:29:28.813449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-10T12:29:28.813700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-10T12:29:28.813790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-10T12:29:28.813876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.813944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-10T12:29:28.814046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-10T12:29:28.814132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-10T12:29:28.841168Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-10T12:29:28.841486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-10T12:29:28.841530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-10T12:29:28.841725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:28.841920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-10T12:29:28.842023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-10T12:29:28.842071Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-10T12:29:28.842169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-10T12:29:28.842233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-10T12:29:28.842281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-10T12:29:28.842337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-10T12:29:28.842488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-10T12:29:28.842635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-10T12:29:28.842668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-10T12:29:28.842710Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-10T12:29:28.842803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-10T12:29:28.842841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-10T12:29:28.842882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-10T12:29:28.842911Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-10T12:29:28.842963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-10T12:29:28.843072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-10T12:29:28.843140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-10T12:29:28.843195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-10T12:29:28.843261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-10T12:29:28.843299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-10T12:29:28.843555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-10T12:29:28.843597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-10T12:29:28.843617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-10T12:29:28.843717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-10T12:29:28.843747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.843770Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-10T12:29:28.843812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-10T12:29:28.843858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-10T12:29:28.843883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-10T12:29:28.843916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-10T12:29:28.843945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-10T12:29:28.843966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-10T12:29:28.844061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-10T12:29:28.844127Z node 1 :TX_COLUMNSHARD WAR ... 
54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-10T12:31:27.427086Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-10T12:31:27.427665Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:31:27.427891Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.427936Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-10T12:31:27.428107Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-10T12:31:27.428187Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-10T12:31:27.428493Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-10T12:31:27.428665Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.428851Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.428995Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.429302Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-10T12:31:27.429446Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.429585Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.429853Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2026-01-10T12:31:27.430469Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":119198518,"name":"_full_task","f":119198518,"d_finished":0,"c":0,"l":119211670,"d":13152},"events":[{"name":"bootstrap","f":119198855,"d_finished":1507,"c":1,"l":119200362,"d":1507},{"a":119211032,"name":"ack","f":119209391,"d_finished":1387,"c":1,"l":119210778,"d":2025},{"a":119211019,"name":"processing","f":119200523,"d_finished":3690,"c":3,"l":119210780,"d":4341},{"name":"ProduceResults","f":119199870,"d_finished":2453,"c":6,"l":119211359,"d":2453},{"a":119211365,"name":"Finish","f":119211365,"d_finished":0,"c":0,"l":119211670,"d":305},{"name":"task_result","f":119200541,"d_finished":2237,"c":2,"l":119208864,"d":2237}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.430545Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-10T12:31:27.431059Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":119198518,"name":"_full_task","f":119198518,"d_finished":0,"c":0,"l":119212340,"d":13822},"events":[{"name":"bootstrap","f":119198855,"d_finished":1507,"c":1,"l":119200362,"d":1507},{"a":119211032,"name":"ack","f":119209391,"d_finished":1387,"c":1,"l":119210778,"d":2695},{"a":119211019,"name":"processing","f":119200523,"d_finished":3690,"c":3,"l":119210780,"d":5011},{"name":"ProduceResults","f":119199870,"d_finished":2453,"c":6,"l":119211359,"d":2453},{"a":119211365,"name":"Finish","f":119211365,"d_finished":0,"c":0,"l":119212340,"d":975},{"name":"task_result","f":119200541,"d_finished":2237,"c":2,"l":119208864,"d":2237}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-10T12:31:27.431141Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-10T12:31:27.413944Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-10T12:31:27.431186Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-10T12:31:27.431342Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] |96.6%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId 2026-01-10 12:31:38,577 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:31:38,829 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 391530 58.6M 57.7M 32.5M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/0017e9/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.ar 391716 1.7G 1.7G 1.2G └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/0017e9/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_s Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:21:42.112874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:42.188385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:42.194594Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:21:42.195001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:21:42.195091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:21:42.520486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:42.607156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:42.607247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:42.640647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:42.712215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:21:43.325539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:21:43.325901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:21:43.325927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:21:43.325958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:21:43.326242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:21:43.387151Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:21:43.909864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:21:46.240131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:21:46.240199Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:21:46.248354Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:21:46.274800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
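The TX_COLUMNSHARD_SCAN scan_finish/scan_finished entries earlier in this chunk carry a stats={...} blob that is plain JSON: a phase timeline under "p", totals under "full", and per-stage timings under "events" (the 13152 total in the first of them lines up with the 0.013 s mark on its timeline, so the durations appear to be microseconds). A minimal Python sketch for pulling the per-stage numbers out of such a line follows; the regex and field names come from the log text above, while the function and variable names are purely illustrative.

    import json
    import re

    # Illustrative helper (not part of the test harness): extract the stats={...}
    # blob from a scan_finished log line and return each stage's accumulated
    # duration ("d_finished") and call count ("c"), keyed by stage name.
    STATS_RE = re.compile(r"stats=(\{.*?\});iterator=")

    def scan_stage_durations(log_line: str) -> dict:
        match = STATS_RE.search(log_line)
        if not match:
            return {}
        stats = json.loads(match.group(1))
        return {
            event["name"]: (event.get("d_finished", 0), event.get("c", 0))
            for event in stats.get("events", [])
        }

    # Applied to the scan_finished line above this yields, among others,
    # {"bootstrap": (1507, 1), "ack": (1387, 1), "processing": (3690, 3)}.
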
2026-01-10T12:21:46.274926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.313771Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:21:46.314798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:46.471605Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:21:46.471711Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:21:46.472895Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.473567Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.474292Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475046Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475136Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475264Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475331Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475442Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.475546Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:21:46.490115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:21:46.680950Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:21:46.710235Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:21:46.710338Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:21:46.732527Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:21:46.733558Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:21:46.733742Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:21:46.733782Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:21:46.733817Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:21:46.733861Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:21:46.733920Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:21:46.733974Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:21:46.734627Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:21:46.751644Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:46.751747Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2125:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:21:46.767304Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2162:2620] 2026-01-10T12:21:46.768456Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2162:2620], schemeshard id = 72075186224037897 2026-01-10T12:21:46.835293Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2252:2667] 2026-01-10T12:21:46.837770Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:21:46.848892Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Describe result: PathErrorUnknown 2026-01-10T12:21:46.848949Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Creating table 2026-01-10T12:21:46.849027Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:21:46.854625Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2309:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:21:46.858504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:21:46.870753Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:21:46.870923Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Subscribe on create table tx: 281474976720657 2026-01-10T12:21:46.882475Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:21:47.046620Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:21:47.114231Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:21:47.394938Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:21:47.480382Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:21:47.480499Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2258:2671] Owner: [2:2257:2670]. Column diff is empty, finishing 2026-01-10T12:21:48.088499Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... OwnerId: [2:5008:4456], ActorId: [2:5009:4457], Got response [2:5010:4458] SUCCESS 2026-01-10T12:28:15.194091Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:28:15.245531Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:28:15.245587Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId2, ActorId=[1:3154:3432] 2026-01-10T12:28:16.409993Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:28:16.411259Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:28:16.414598Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:28:16.414770Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], Start read next stream part 2026-01-10T12:28:16.501257Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [2:5094:4510]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:28:16.501537Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2026-01-10T12:28:16.501574Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 8, ReplyToActorId = [2:5094:4510], StatRequests.size() = 1 2026-01-10T12:29:24.571524Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01keky1x4y407k5rn9ere2j6nw", SessionId: ydb://session/3?node_id=2&id=ZWU3YWI2MGQtZjdlYzEwZTMtNjkxOGUyZTctZGIzODQ0ODU=, Slow query, duration: 68.152861s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:29:24.572371Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:29:24.572494Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], Start read next stream part 2026-01-10T12:29:24.572731Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35000, txId: 18446744073709551615] shutting down 2026-01-10T12:29:24.572802Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:29:24.575090Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:29:24.575135Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5083:4500], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA1ZmQ4NjYtOTZiZjliNTUtOTcwNzAzZWEtY2E4ZWU3ODg=, TxId: 2026-01-10T12:29:24.576014Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-10T12:29:24.576125Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], Start read next stream part 2026-01-10T12:29:24.627591Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [2:5204:4609]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:29:24.627841Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2026-01-10T12:29:24.627873Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 9, ReplyToActorId = [2:5204:4609], StatRequests.size() = 1 2026-01-10T12:30:17.836841Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01keky3zq05basbzmj1kjx1202", SessionId: ydb://session/3?node_id=2&id=YjYyMTkzZGItOGRkNTg3YzgtNjg4YjRjZmQtZWU4ZDZlODc=, Slow query, duration: 53.257492s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-10T12:30:17.838945Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:30:17.839045Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], Start read next stream part 2026-01-10T12:30:17.839409Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35100, txId: 18446744073709551615] shutting down 2026-01-10T12:30:17.839757Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:30:17.839817Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5081:2461], ActorId: [2:5186:4594], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=OWM3MjUwOTAtM2ZlODc0YWQtNTZhMTRlYWEtNDg4NWUwYjQ=, TxId: 2026-01-10T12:30:17.840184Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5294:4693], ActorId: [2:5298:4696], Starting query actor #1 [2:5299:4697] 2026-01-10T12:30:17.840227Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5298:4696], ActorId: [2:5299:4697], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-10T12:30:17.844708Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5298:4696], ActorId: [2:5299:4697], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGRiZjY1OTUtNDVlODUyNTQtMWYzNThlMzYtYmNlNjgyNzI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:30:17.897620Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5298:4696], ActorId: [2:5299:4697], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGRiZjY1OTUtNDVlODUyNTQtMWYzNThlMzYtYmNlNjgyNzI=, TxId: 2026-01-10T12:30:17.897703Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5298:4696], ActorId: [2:5299:4697], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGRiZjY1OTUtNDVlODUyNTQtMWYzNThlMzYtYmNlNjgyNzI=, TxId: 2026-01-10T12:30:17.898120Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5294:4693], ActorId: [2:5298:4696], Got response [2:5299:4697] SUCCESS 2026-01-10T12:30:17.898588Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-10T12:30:17.954422Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-10T12:30:17.954495Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId3, ActorId=[1:3154:3432] 2026-01-10T12:30:19.124620Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:30:19.126749Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5369:2461], ActorId: [2:5371:4741], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:30:19.130420Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5369:2461], ActorId: [2:5371:4741], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-10T12:30:19.130584Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5369:2461], ActorId: [2:5371:4741], Start read next stream part 2026-01-10T12:30:19.192369Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 10 ], ReplyToActorId[ [2:5382:4751]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:30:19.192807Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 10 ] 2026-01-10T12:30:19.192864Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 10, ReplyToActorId = [2:5382:4751], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/0017e9/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/0017e9/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |96.6%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |96.6%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> TNodeBrokerTest::TestRandomActions [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2026-01-10T12:29:51.595161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:51.595252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-10T12:29:52.515968Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.557844Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.583631Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.585384Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.585832Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.626288Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.692855Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.810689Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:29:52.812240Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-10T12:29:52.812846Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-10T12:29:52.814810Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:29:53.500937Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:29:53.890823Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.891629Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.944502Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.949297Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: 
No free node IDs 2026-01-10T12:29:53.949965Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.950487Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.951354Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.951932Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.952386Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.953171Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.955699Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.956286Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:53.957359Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.958388Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-10T12:29:53.958884Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.004396Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.008916Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.009397Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.009739Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.010512Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.010940Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.011691Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.012449Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.012764Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.013162Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.013536Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.014084Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 
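The AnalyzeColumnshard::AnalyzeMultiOperationId chunk above was stopped by the 600-second wrapper timeout, and its output includes KQP_SLOW_LOG warnings for the statistics queries (68.15 s and 53.26 s in this run). A rough Python sketch for totalling such slow-query durations in a chunk's output, to gauge how much of the timeout budget they consumed, might look like the following; the "Slow query, duration: ...s" pattern is copied from the log above, and the file handling around it is hypothetical.

    import re
    import sys

    # Illustrative only: sum the durations reported by KQP_SLOW_LOG lines such as
    # "Slow query, duration: 68.152861s, status: STATUS_CODE_UNSPECIFIED, ...".
    SLOW_RE = re.compile(r"Slow query, duration: ([0-9.]+)s")

    def total_slow_query_seconds(path: str) -> float:
        total = 0.0
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                total += sum(float(v) for v in SLOW_RE.findall(line))
        return total

    if __name__ == "__main__":
        # e.g. point it at the chunk's testing_out_stuff log file
        print(f"{total_slow_query_seconds(sys.argv[1]):.3f}s in slow queries")
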
2026-01-10T12:29:54.014396Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.015028Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.015411Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.015722Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.016130Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.017081Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.017363Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.017746Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.018461Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.018868Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.019254Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-10T12:29:54.019575Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:29:54.502659Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.533962Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.535060Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.595282Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.599994Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.600896Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.694249Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.760904Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:29:54.812213Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:29:55.527317Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2026-01-10T12:29:55.528558Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2026-01-10T12:29:55.529043Z 
node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2026-01-10T12:29:55.543978Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2026-01-10T12:29:55.544403Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2026-01-10T12:29:55.544909Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2026-01-10T12:29:55.962983Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.310281Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.310734Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.352482Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.357418Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.357775Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.358193Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.358506Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.764897Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.765364Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:29:56.808372Z node 1 :NOD ... 
e.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:35.033394Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:35.092914Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:31:35.543279Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:35.970000Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:35.972363Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:36.117843Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:36.241026Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:36.255337Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.324006Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.326556Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.329127Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.332227Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.482286Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.497690Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.504296Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.509486Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:37.967667Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-10T12:31:38.888184Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:38.890724Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.162080Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.164979Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.207062Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.234959Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease 
for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.239825Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.397529Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.400779Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.403395Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.415072Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.458129Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.463656Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-10T12:31:39.466442Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2026-01-10T12:31:40.232551Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2026-01-10T12:31:40.235302Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Node has expired 2026-01-10T12:31:40.848758Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-10T12:31:41.451940Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-10T12:31:41.474817Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:41.505642Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:42.920407Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:31:42.971117Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.010365Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.771845Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.774207Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.803022Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.805463Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:43.862085Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:44.730525Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 
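TNodeBrokerTest::TestRandomActions passes, but its output consists almost entirely of expected NODE_BROKER ERROR lines of three kinds (WRONG_REQUEST: Unknown node, WRONG_REQUEST: Node has expired, ERROR_TEMP: No free node IDs). If one wanted to tally them when eyeballing such a chunk, a small sketch along these lines would do; the three message strings are the ones visible in this run, everything else is illustrative.

    import re
    from collections import Counter

    # Illustrative tally of the NODE_BROKER error variants printed by this test.
    REASON_RE = re.compile(
        r"WRONG_REQUEST: (?:Unknown node|Node has expired)"
        r"|ERROR_TEMP: No free node IDs"
    )

    def count_node_broker_errors(text: str) -> Counter:
        return Counter(REASON_RE.findall(text))

    # count_node_broker_errors(open("chunk.log").read()) would return something
    # like Counter({"WRONG_REQUEST: Unknown node": ...}) for this output.
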
2026-01-10T12:31:44.733599Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-10T12:31:45.167715Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.157715Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.183471Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.187276Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.191332Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.196394Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.201247Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.303622Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.307544Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.311945Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.804151Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.844687Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.850263Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.852467Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-10T12:31:46.872301Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:31:46.877160Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2026-01-10T12:31:46.878941Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:31:46.880499Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:31:46.888900Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-10T12:31:46.890983Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-10T12:31:47.296342Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:47.683624Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:47.685834Z 
node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-10T12:31:47.723727Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:31:47.726046Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.206164Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.209603Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.212839Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.259984Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.686787Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-10T12:31:48.688842Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:135:2157] sender: [1:136:2057] recipient: [1:112:2143] 2026-01-10T12:30:40.780273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.781403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.781451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.801085Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.801435Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.801722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.852233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.866638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.872447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.875401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.875481Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.875543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 
2026-01-10T12:30:40.880335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.880578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.880652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2157] in generation 2 Leader for TabletID 9437184 is [1:135:2157] sender: [1:212:2057] recipient: [1:14:2061] 2026-01-10T12:30:40.981165Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.017467Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-10T12:30:41.017682Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.017828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:30:41.017892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.017931Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.017969Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.018137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.018197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.018477Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.018572Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.018731Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.018790Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.018838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.018891Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.018940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.018977Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.019024Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.019152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.019208Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.019253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], 
sessionId# [0:0:0] 2026-01-10T12:30:41.027542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.027643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.027747Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.028039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.028528Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.031466Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.031536Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.031578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.031608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.031640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.031880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.031918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.031949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.031975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.032025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.032053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.032118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.032183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.032211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.045095Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.045178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.045219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.045301Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.048542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.053698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:30:41.053983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.054019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.054172Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.054226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-10T12:30:41.054277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.054330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.058341Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.058407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.058605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058711Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.058772Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.058827Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... 
HARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-10T12:31:52.486150Z node 36 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:31:52.486178Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:31:52.486216Z node 36 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [7:6] in PlanQueue unit at 9437186 2026-01-10T12:31:52.486250Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PlanQueue 2026-01-10T12:31:52.486288Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.486324Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2026-01-10T12:31:52.486360Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadTxDetails 2026-01-10T12:31:52.486391Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2026-01-10T12:31:52.487092Z node 36 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2026-01-10T12:31:52.487149Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487183Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2026-01-10T12:31:52.487217Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2026-01-10T12:31:52.487248Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2026-01-10T12:31:52.487293Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487323Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2026-01-10T12:31:52.487351Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2026-01-10T12:31:52.487380Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2026-01-10T12:31:52.487429Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [7:6] is the new logically complete end at 9437186 2026-01-10T12:31:52.487467Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [7:6] is the new logically incomplete end at 9437186 2026-01-10T12:31:52.487504Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [7:6] at 9437186 2026-01-10T12:31:52.487551Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487580Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2026-01-10T12:31:52.487610Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2026-01-10T12:31:52.487639Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] 
at 9437186 on unit BuildDataTxOutRS 2026-01-10T12:31:52.487689Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487720Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2026-01-10T12:31:52.487749Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2026-01-10T12:31:52.487779Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2026-01-10T12:31:52.487808Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487839Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2026-01-10T12:31:52.487869Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2026-01-10T12:31:52.487896Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2026-01-10T12:31:52.487936Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.487964Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2026-01-10T12:31:52.487992Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2026-01-10T12:31:52.488021Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2026-01-10T12:31:52.488052Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.488364Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2026-01-10T12:31:52.488407Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BlockFailPoint 2026-01-10T12:31:52.488438Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BlockFailPoint 2026-01-10T12:31:52.488475Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.488505Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BlockFailPoint 2026-01-10T12:31:52.488533Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2026-01-10T12:31:52.488562Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2026-01-10T12:31:52.488935Z node 36 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2026-01-10T12:31:52.488996Z node 36 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:31:52.489057Z node 36 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.489091Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2026-01-10T12:31:52.489121Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompleteOperation 2026-01-10T12:31:52.489152Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2026-01-10T12:31:52.489365Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is DelayComplete 2026-01-10T12:31:52.489408Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2026-01-10T12:31:52.489445Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompletedOperations 2026-01-10T12:31:52.489481Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2026-01-10T12:31:52.489522Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-10T12:31:52.489552Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2026-01-10T12:31:52.489582Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [7:6] at 9437186 has finished 2026-01-10T12:31:52.489621Z node 36 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:31:52.489657Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-10T12:31:52.489695Z node 36 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-10T12:31:52.489731Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-10T12:31:52.504649Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2026-01-10T12:31:52.504730Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2026-01-10T12:31:52.504793Z node 36 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:31:52.504854Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2026-01-10T12:31:52.504930Z node 36 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [36:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:31:52.504986Z node 36 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:31:52.505308Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2026-01-10T12:31:52.505349Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2026-01-10T12:31:52.505395Z node 36 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-10T12:31:52.505430Z node 36 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2026-01-10T12:31:52.505478Z node 36 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [36:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:31:52.505516Z node 36 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-10T12:31:52.505747Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2026-01-10T12:31:52.505787Z node 36 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2026-01-10T12:31:52.505829Z node 36 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-10T12:31:52.505862Z node 36 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2026-01-10T12:31:52.505910Z node 36 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [36:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:31:52.505946Z node 36 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229 folder_id=folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229 iam_token=usr_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229 cloud_account=acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229 2026-01-10T12:31:09.358129Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229]","tx_id":"281474976720695","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:09.458827Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229/000000000000000104od]","tx_id":"281474976720701","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE 
DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:09.494131Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229/000000000000000104od/v2]","tx_id":"281474976720702","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:09.600731Z: {"request_id":"f96dde12-2c1406f6-344df705-b5ecc1cc","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229"} ======================================== 2026-01-10T12:31:10.610739Z: {"request_id":"f96dde12-2c1406f6-344df705-b5ecc1cc","permission":"ymq.queues.create","id":"1064024803772143760$CreateMessageQueue$2026-01-10T12:31:10.610527Z","idempotency_id":"1064024803772143760$CreateMessageQueue$2026-01-10T12:31:09.399000Z","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:09.399000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3da355ee-ee20-11f0-830d-d00d67ac8229.fifo","resource_id":"000000000000000104od","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:11.459238Z: {"request_id":"64ee164a-eff993a5-f8cc938a-a8eaf7c1","permission":"ymq.queues.setAttributes","id":"1747325596871271650$UpdateMessageQueue$2026-01-10T12:31:11.459010Z","idempotency_id":"1747325596871271650$UpdateMessageQueue$2026-01-10T12:31:10.716000Z","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:10.716000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3da355ee-ee20-11f0-830d-d00d67ac8229.fifo","resource_id":"000000000000000104od","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:12.886521Z: {"request_id":"c0888494-1de1a547-e981b58b-18c5d57d","permission":"ymq.queues.setAttributes","id":"6186041280928681932$UpdateMessageQueue$2026-01-10T12:31:12.886371Z","idempotency_id":"6186041280928681932$UpdateMessageQueue$2026-01-10T12:31:11.844000Z","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:11.844000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3da355ee-ee20-11f0-830d-d00d67ac8229.fifo","resource_id":"000000000000000104od","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:13.034902Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229/000000000000000104od/v2]","tx_id":"281474976720719","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:13.073614Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229/000000000000000104od]","tx_id":"281474976720720","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:13.110656Z: {"request_id":"71ae5814-8b82c881-4423375e-f440f750","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","subject":"fake_user_sid@as","queue":"000000000000000104od","resource_id":"000000000000000104od","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229"} ======================================== 2026-01-10T12:31:14.927154Z: {"request_id":"71ae5814-8b82c881-4423375e-f440f750","permission":"ymq.queues.delete","id":"1379114937282387363$DeleteMessageQueue$2026-01-10T12:31:14.926975Z","idempotency_id":"1379114937282387363$DeleteMessageQueue$2026-01-10T12:31:12.954000Z","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:12.954000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3da355ee-ee20-11f0-830d-d00d67ac8229.fifo","resource_id":"000000000000000104od","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:14.940382Z: {"request_id":"71ae5814-8b82c881-4423375e-f440f750","permission":"ymq.queues.delete","id":"1379114937282387363$DeleteMessageQueue$2026-01-10T12:31:14.940215Z","idempotency_id":"1379114937282387363$DeleteMessageQueue$2026-01-10T12:31:12.954000Z","cloud_id":"CLOUD_FOR_folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:12.954000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_3da355ee-ee20-11f0-830d-d00d67ac8229.fifo","resource_id":"000000000000000104od","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_3dc9eaf3-ee20-11f0-91b9-d00d67ac8229","component":"ymq"} ======================================== ======================================== run test with cloud_id=CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229 folder_id=folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229 iam_token=usr_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229 cloud_account=acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229 2026-01-10T12:31:25.292825Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229]","tx_id":"281474976720735","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 
======================================== 2026-01-10T12:31:25.390937Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229/0000000000000003052c]","tx_id":"281474976720741","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:25.410844Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229/0000000000000003052c/v4]","tx_id":"281474976720742","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:25.484039Z: {"request_id":"894a35b1-c7cac067-3dbb2749-5f422852","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229"} ======================================== 2026-01-10T12:31:27.055762Z: {"request_id":"894a35b1-c7cac067-3dbb2749-5f422852","permission":"ymq.queues.create","id":"12751966071856317604$CreateMessageQueue$2026-01-10T12:31:27.055492Z","idempotency_id":"12751966071856317604$CreateMessageQueue$2026-01-10T12:31:25.346000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:25.346000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:27.056214Z: {"request_id":"5aa6e473-1b08155f-9d48ff6f-8f9f2193","permission":"ymq.queues.setAttributes","id":"210999613443149812$UpdateMessageQueue$2026-01-10T12:31:27.055579Z","idempotency_id":"210999613443149812$UpdateMessageQueue$2026-01-10T12:31:26.554000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:26.554000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:28.735333Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229/0000000000000003052c/v4]","tx_id":"281474976720760","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:28.759151Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229/0000000000000003052c]","tx_id":"281474976720761","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-10T12:31:28.780921Z: {"request_id":"95a2b787-637c9e9f-eee86387-45d2992e","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","subject":"fake_user_sid@as","queue":"0000000000000003052c","resource_id":"0000000000000003052c","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229"} ======================================== 2026-01-10T12:31:29.081090Z: {"request_id":"cac4b298-f8b53f29-bb649047-cc3801a7","permission":"ymq.queues.setAttributes","id":"4763485426823732103$UpdateMessageQueue$2026-01-10T12:31:29.080851Z","idempotency_id":"4763485426823732103$UpdateMessageQueue$2026-01-10T12:31:27.622000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:27.622000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:29.081392Z: {"request_id":"cac4b298-f8b53f29-bb649047-cc3801a7","permission":"ymq.queues.setAttributes","id":"4763485426823732103$UpdateMessageQueue$2026-01-10T12:31:29.081147Z","idempotency_id":"4763485426823732103$UpdateMessageQueue$2026-01-10T12:31:27.622000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:27.622000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:29.081485Z: {"request_id":"95a2b787-637c9e9f-eee86387-45d2992e","permission":"ymq.queues.delete","id":"16954249357074451391$DeleteMessageQueue$2026-01-10T12:31:29.080927Z","idempotency_id":"16954249357074451391$DeleteMessageQueue$2026-01-10T12:31:28.678000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:28.678000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== 2026-01-10T12:31:29.081773Z: 
{"request_id":"95a2b787-637c9e9f-eee86387-45d2992e","permission":"ymq.queues.delete","id":"16954249357074451391$DeleteMessageQueue$2026-01-10T12:31:29.081221Z","idempotency_id":"16954249357074451391$DeleteMessageQueue$2026-01-10T12:31:28.678000Z","cloud_id":"CLOUD_FOR_folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-10T12:31:28.678000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_4745721a-ee20-11f0-82b8-d00d67ac8229","resource_id":"0000000000000003052c","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_4756b9fa-ee20-11f0-9b89-d00d67ac8229","component":"ymq"} ======================================== ======================================== |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> 
KqpLimits::ManyPartitionsSortingLimit >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> test_ttl.py::TestTTLAlterSettings::test_case |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] Test command err: 2026-01-10T12:29:55.900367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:29:55.900448Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] Test command err: 2026-01-10T12:30:01.965238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:01.965320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] Test command err: 2026-01-10T12:30:04.817780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:04.817869Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> THiveTest::TestCreateExternalTablet [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 62344, MsgBus: 21828 2026-01-10T12:26:57.842725Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708412632853792:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:26:57.842806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:26:57.878347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:26:58.187659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:26:58.197905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:26:58.198028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:26:58.220839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:26:58.340226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708412632853757:2081] 1768048017841254 != 1768048017841257 2026-01-10T12:26:58.356866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:26:58.465054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:26:58.465078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:26:58.465085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:26:58.465180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:26:58.474463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:26:58.858450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:26:59.007409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:26:59.013800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:26:59.062717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.207304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:26:59.367473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-10T12:26:59.462317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.564916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708429812724828:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.565076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.565457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708429812724838:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.565517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:01.928846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:01.975055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.053584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.133310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.166278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.235705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.277915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.329142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:02.432050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708434107693014:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.432230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.432970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708434107693019:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.433031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708434107693020:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.433163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:02.440882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:02.460041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708434107693023:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:02.518432Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708434107693074:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:02.845286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708412632853792:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:02.845357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:37.697567Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01keky9vvjb0fenrypftywe0m3, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 459ms" severity: 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:38.480550Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01keky9wm1998n5gd7xp1mfykc, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 461ms" severity: 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:39.268117Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01keky9xcm2fdpp8nxyh10ke4n, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 463ms" severity: 1 }{ message: "Cancelling after 463ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:40.129945Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01keky9y7k2kjvqgsqm83w7j3n, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 465ms" severity: 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:41.017052Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01keky9z335ctqj9xrdxe1h58f, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 467ms" severity: 1 }{ message: "Cancelling after 468ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:41.983114Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, 
ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya0174tp945z4bvcm84d7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 469ms" severity: 1 }{ message: "Cancelling after 470ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:42.876478Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya0x2d7qsbacd8k170t3b, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 471ms" severity: 1 }{ message: "Cancelling after 472ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:43.372542Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya1cj8h8wnka5jwagptrs, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 472ms" severity: 1 }{ message: "Cancelling after 472ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:43.959548Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya1yx7gg7shbd3rc875v5, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 474ms" severity: 1 }{ message: "Cancelling after 473ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:44.451930Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [5:7593709905314746077:2551] TxId: 0. Ctx: { TraceId: 01kekya2e88rcqwwwnfx75gha0, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 475ms } {
: Error: Cancelling after 474ms during execution } ] trace_id# 2026-01-10T12:32:44.452295Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya2e88rcqwwwnfx75gha0, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 475ms" severity: 1 }{ message: "Cancelling after 474ms during execution" severity: 1 } trace_id# 2026-01-10T12:32:44.947635Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya2xndpvxsw8sdgjaq26d, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 476ms" severity: 1 }{ message: "Cancelling after 477ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:45.864760Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya3t93bz16bxa261eae31, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 478ms" severity: 1 }{ message: "Cancelling after 478ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:46.368251Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya49x1s07as4ybjpwa91x, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 479ms" severity: 1 }{ message: "Cancelling after 481ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:47.032513Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya4yka99btmdjjdap1ws9, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 481ms" severity: 1 }{ message: "Cancelling after 484ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:47.873784Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya5ry6thd0n3cn9nn8z31, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 483ms" severity: 1 }{ message: "Cancelling after 482ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:48.758439Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya6mge8048yx4fyese68x, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 485ms" severity: 1 }{ message: "Cancelling after 485ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:49.520502Z 
node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya7c4e3w6msr36fg047mh, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 487ms" severity: 1 }{ message: "Cancelling after 491ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:50.280457Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya83v105m2k5ma69saswx, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 489ms" severity: 1 }{ message: "Cancelling after 491ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:51.168782Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya8zk2bxvzxj4320a4vcf, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 491ms" severity: 1 }{ message: "Cancelling after 492ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:52.115763Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekya9x00qdcgghsfr5r8r28, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 493ms" severity: 1 }{ message: "Cancelling after 494ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:53.024719Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekyaasg7g7h14v0w6x5hqcg, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 495ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:53.920431Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekyabnd9ex7xgvjabgqcb1e, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 497ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } trace_id# 2026-01-10T12:32:54.716661Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NGMxOTk1OWUtNjUxOTU4MzEtZGEwMDViZGMtNDg3ZDdlMmM=, ActorId: [5:7593709415688470161:2551], ActorState: ExecuteState, LegacyTraceId: 01kekyacea19nhaheh7ksazgtg, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 499ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> 
THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2026-01-10T12:26:00.630474Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-10T12:26:00.659814Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:00.660299Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-10T12:26:00.661165Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-10T12:26:00.661585Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-10T12:26:00.662771Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-10T12:26:00.662837Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-10T12:26:00.663836Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-10T12:26:00.663877Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-10T12:26:00.664032Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-10T12:26:00.664376Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-10T12:26:00.678890Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-10T12:26:00.678955Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-10T12:26:00.681382Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.681575Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.681754Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.681892Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.682044Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.682190Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.682356Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create 
Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-10T12:26:00.682382Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-10T12:26:00.682461Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-10T12:26:00.682511Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-10T12:26:00.682558Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-10T12:26:00.682616Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-10T12:26:00.683378Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-10T12:26:00.716359Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:00.716443Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:00.718319Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:00.718500Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:00.718551Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-10T12:26:00.719391Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:00.719939Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:00.719990Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-10T12:26:00.730749Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-10T12:26:00.730816Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-10T12:26:00.731037Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-10T12:26:00.731066Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-10T12:26:00.731142Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-10T12:26:00.731203Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-10T12:26:00.731246Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-10T12:26:00.731287Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:00.731477Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: 
TClient[72057594037927937] queue send [1:52:2092] 2026-01-10T12:26:00.731542Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:00.731698Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:00.731847Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-10T12:26:00.731891Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-10T12:26:00.731946Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-10T12:26:00.737891Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-10T12:26:00.738157Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-10T12:26:00.738246Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-10T12:26:00.739187Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:26:00.739341Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:26:00.739759Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-10T12:26:00.739901Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-10T12:26:00.739955Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-10T12:26:00.746136Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:00.746456Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-10T12:26:00.746672Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-10T12:26:00.746834Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] 
Cookie# 0 2026-01-10T12:26:00.747085Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-10T12:26:00.747132Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-10T12:26:00.747159Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-10T12:26:00.747299Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:26:00.750020Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-10T12:26:00.750349Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-10T12:26:00.750400Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-10T12:26:00.750430Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-10T12:26:00.750521Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest: ... 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [155:271:2264] 2026-01-10T12:32:51.846727Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [155:271:2264] 2026-01-10T12:32:51.846795Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [155:271:2264] 2026-01-10T12:32:51.846888Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [155:271:2264] 2026-01-10T12:32:51.846960Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [155:271:2264] 2026-01-10T12:32:51.847123Z node 155 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [155:270:2263] EventType# 268697601 2026-01-10T12:32:51.847448Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2026-01-10T12:32:51.847551Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:32:51.848429Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2026-01-10T12:32:51.848545Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:32:51.848709Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [155:312:2292] 2026-01-10T12:32:51.848749Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [155:312:2292] 2026-01-10T12:32:51.848831Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [155:93:2123] followers: 0 ev: {EvForward TabletID: 72057594037932033 
Ev: nullptr Flags: 1:2:0} 2026-01-10T12:32:51.848897Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 155 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [155:93:2123] 2026-01-10T12:32:51.848993Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [155:312:2292] 2026-01-10T12:32:51.849044Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [155:312:2292] 2026-01-10T12:32:51.849093Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [155:312:2292] 2026-01-10T12:32:51.849201Z node 155 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [155:312:2292] 2026-01-10T12:32:51.849370Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [155:312:2292] 2026-01-10T12:32:51.849413Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [155:312:2292] 2026-01-10T12:32:51.849443Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [155:312:2292] 2026-01-10T12:32:51.849499Z node 155 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [155:280:2269] EventType# 268637702 2026-01-10T12:32:51.849666Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-01-10T12:32:51.849763Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:32:51.850034Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:32:51.850136Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:32:51.850462Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-10T12:32:51.850560Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:32:51.851301Z node 155 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{136489207671872}(72075186224037888)::Execute - TryToBoot was not successfull 2026-01-10T12:32:51.851463Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2026-01-10T12:32:51.851567Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:32:51.863851Z node 155 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [185eac4b9c06d110] bootstrap ActorId# [155:315:2295] Group# 0 BlobCount# 1 BlobIDs# 
[[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-10T12:32:51.864008Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [185eac4b9c06d110] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-10T12:32:51.864087Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [185eac4b9c06d110] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-10T12:32:51.864151Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2026-01-10T12:32:51.864204Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2026-01-10T12:32:51.864348Z node 155 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [155:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-10T12:32:51.868963Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-10T12:32:51.869112Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-10T12:32:51.869181Z node 155 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-10T12:32:51.869439Z node 155 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-10T12:32:51.869667Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2026-01-10T12:32:51.870059Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:32:51.870214Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:32:51.870278Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:32:51.870326Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:32:51.870378Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.870457Z node 155 :STATESTORAGE DEBUG: 
statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.870511Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.870955Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [155:319:2298] 2026-01-10T12:32:51.871055Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [155:319:2298] 2026-01-10T12:32:51.871273Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-10T12:32:51.871448Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-10T12:32:51.871640Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-10T12:32:51.871735Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-10T12:32:51.871779Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-10T12:32:51.871857Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.871946Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.871992Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-10T12:32:51.872135Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-10T12:32:51.872288Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [155:319:2298] 2026-01-10T12:32:51.872364Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [155:319:2298] |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] |96.7%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> KqpLimits::QSReplySize-useSink [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |96.7%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 5346, MsgBus: 8045 2026-01-10T12:27:09.556902Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708463735796485:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:09.557261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:09.698609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown 2026-01-10T12:27:09.960852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:09.989797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:09.989951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:10.007605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:10.057529Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:10.059100Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708463735796269:2081] 1768048029507720 != 1768048029507723 2026-01-10T12:27:10.155064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:10.174321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:10.174348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:10.174363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:10.174464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:10.551578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:10.635520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:10.700557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:10.911537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:11.135754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:11.237352Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.126980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708480915668401:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.127130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.127617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708480915668411:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.127668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:13.469110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.554672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.609291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.685629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.743016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.816757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.886614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:13.980096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:14.132418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708485210637201:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.132523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.132833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708485210637206:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.132870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708485210637207:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.132899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:14.138341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:14.164530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708485210637210:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:14.236044Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708485210637293:5449] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:14.575601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708463735796485:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:14.575898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:15.757505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... 32:48.192907Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:32:48.379316Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:32:48.379343Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:32:48.379353Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:32:48.379443Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:32:48.508641Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:32:48.924565Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:32:49.162611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:32:49.170336Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:32:49.181098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:49.314172Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:49.583588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:49.715864Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:52.891785Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593709935524163124:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:52.891908Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:52.892467Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593709935524163134:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:52.892546Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:52.913997Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593709914049324799:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:32:52.914095Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:32:52.993272Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.045349Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.092813Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.134856Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.223912Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.299678Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.354088Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.443660Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:32:53.593446Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593709939819131305:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:53.593549Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:53.593858Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593709939819131311:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:53.593913Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593709939819131310:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:53.593951Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:32:53.599656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:32:53.625181Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593709939819131314:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:32:53.723263Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593709939819131365:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:32:56.660463Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:33:03.116531Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:33:03.116576Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:33:12.669139Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=YjNiOTc5Yy0yMGIyOTgzMC1kMDBjNWQwNi05YWFiNzFkZg==, ActorId: [5:7593710008538609404:2719], ActorState: ExecuteState, LegacyTraceId: 01kekyavrtem4gs8n0fny73m8n, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Intermediate data materialization exceeded size limit (88240889 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data." issue_code: 2013 severity: 1 } trace_id#
: Error: Intermediate data materialization exceeded size limit (88240889 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 |96.8%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_solomon >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] 
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:33:35.599896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:33:35.600025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:33:35.600093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:33:35.600140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:33:35.600208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:33:35.600265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:33:35.600398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:33:35.600499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:33:35.601518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:33:35.603461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:33:35.724210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:33:35.724463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:33:35.747777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:33:35.748036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:33:35.751007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:33:35.770930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:33:35.771370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:33:35.781857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:33:35.786020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:33:35.814179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2026-01-10T12:33:35.820311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:33:35.846048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:33:35.846209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:33:35.846340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:33:35.846423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:33:35.846600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:33:35.847085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:33:35.866987Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2026-01-10T12:33:36.019957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-10T12:33:36.021333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:33:36.024474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-10T12:33:36.024579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-10T12:33:36.026131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-10T12:33:36.026253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:33:36.037668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-10T12:33:36.039908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-10T12:33:36.040296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:33:36.040456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-10T12:33:36.040513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-10T12:33:36.040564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-10T12:33:36.045274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:33:36.045423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-10T12:33:36.045473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-10T12:33:36.049108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:33:36.049179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-10T12:33:36.049235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:33:36.049299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-10T12:33:36.060768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:33:36.065417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-10T12:33:36.066721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-10T12:33:36.068019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:33:36.068213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:33:36.068291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:33:36.068714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-10T12:33:36.068780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-10T12:33:36.069021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-10T12:33:36.069849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-10T12:33:36.078334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:33:36.078407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 53Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-10T12:33:37.366964Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-10T12:33:37.367350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-10T12:33:37.367600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409546 2026-01-10T12:33:37.370286Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 2026-01-10T12:33:37.371945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-10T12:33:37.372194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409547 2026-01-10T12:33:37.373016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:33:37.373069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:33:37.373205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-10T12:33:37.375178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-10T12:33:37.375327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:33:37.375366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-10T12:33:37.375472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-10T12:33:37.376746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-10T12:33:37.376808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2026-01-10T12:33:37.379001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-10T12:33:37.379055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2026-01-10T12:33:37.379166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-10T12:33:37.379205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2026-01-10T12:33:37.379715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-10T12:33:37.380757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-10T12:33:37.381122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-10T12:33:37.381164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-10T12:33:37.381639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-10T12:33:37.381743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-10T12:33:37.381792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:924:2783] TestWaitNotification: OK eventTxId 106 2026-01-10T12:33:37.382361Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:33:37.382578Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 238us result status StatusPathDoesNotExist 2026-01-10T12:33:37.382803Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:33:37.383372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:33:37.383599Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 201us result status StatusPathDoesNotExist 2026-01-10T12:33:37.383733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:33:37.384391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:33:37.384567Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 197us result status StatusSuccess 2026-01-10T12:33:37.385048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2026-01-10T12:33:37.386884Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2026-01-10T12:33:37.386968Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2026-01-10T12:33:37.387033Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2026-01-10T12:33:37.387095Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] |96.8%| [TA] $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] >> 
listing_batching.py::TestListingBatching::test_listing_batching_solomon [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2026-01-10T12:30:40.821058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:30:40.822305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:30:40.822362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:30:40.831585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:30:40.831932Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2157] 2026-01-10T12:30:40.832213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:30:40.872360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:135:2157]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:30:40.883186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:30:40.883744Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:30:40.885248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-10T12:30:40.885311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-10T12:30:40.885352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-10T12:30:40.885638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:30:40.885723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:30:40.885786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:201:2157] in generation 2 2026-01-10T12:30:40.981777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:30:41.016984Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 
9437184 2026-01-10T12:30:41.017158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:30:41.017257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-10T12:30:41.017313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-10T12:30:41.017349Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-10T12:30:41.017385Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.017595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.017664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.017982Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-10T12:30:41.018084Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-10T12:30:41.018152Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.018185Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:30:41.018231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-10T12:30:41.018266Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-10T12:30:41.018314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-10T12:30:41.018345Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-10T12:30:41.018386Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-10T12:30:41.018479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:213:2212], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.018521Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.018570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:213:2212], sessionId# [0:0:0] 2026-01-10T12:30:41.030319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:135:2157]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-10T12:30:41.030401Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:30:41.030499Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:30:41.030638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-10T12:30:41.030685Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-10T12:30:41.030725Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-10T12:30:41.030757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-10T12:30:41.030781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-10T12:30:41.030806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-10T12:30:41.030828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.031038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-10T12:30:41.031084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-10T12:30:41.031123Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-10T12:30:41.031150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.031194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-10T12:30:41.031218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-10T12:30:41.031241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-10T12:30:41.031262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.031277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-10T12:30:41.046304Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:30:41.046372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-10T12:30:41.046407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-10T12:30:41.046480Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-10T12:30:41.048526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-10T12:30:41.053722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, 
processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:30:41.053864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-10T12:30:41.053947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:135:2157]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-10T12:30:41.053977Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-10T12:30:41.054098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-10T12:30:41.054203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-10T12:30:41.054243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-10T12:30:41.054285Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-10T12:30:41.058133Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-10T12:30:41.058204Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:30:41.058426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:135:2157], Recipient [1:135:2157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:30:41.058514Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-10T12:30:41.058560Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-10T12:30:41.058595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-10T12:30:41.058629Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-10T12:30:41.058661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
execution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:33:48.085727Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit FinishPropose 2026-01-10T12:33:48.085768Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit FinishPropose 2026-01-10T12:33:48.085826Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 8 ms, propose latency: 8 ms, status: COMPLETE 2026-01-10T12:33:48.085910Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is DelayComplete 2026-01-10T12:33:48.085941Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2026-01-10T12:33:48.085974Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit CompletedOperations 2026-01-10T12:33:48.086010Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2026-01-10T12:33:48.086067Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is Executed 2026-01-10T12:33:48.086094Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2026-01-10T12:33:48.086123Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 9437184 has finished 2026-01-10T12:33:48.090810Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:33:48.090908Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 9437184 on unit FinishPropose 2026-01-10T12:33:48.091100Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-10T12:33:52.083479Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-10T12:33:52.083559Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-10T12:33:52.084090Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:495:2469], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:33:52.084145Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:33:52.084191Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:494:2468], serverId# [3:495:2469], sessionId# [0:0:0] 2026-01-10T12:33:52.084371Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 
AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2026-01-10T12:33:52.084402Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:33:52.084472Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:33:52.085062Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2026-01-10T12:33:52.111632Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-10T12:33:52.111743Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2026-01-10T12:33:52.111790Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-10T12:33:52.111836Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2026-01-10T12:33:52.111906Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-10T12:33:52.111980Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2026-01-10T12:33:52.112026Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-10T12:33:52.112057Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-10T12:33:52.112104Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BlockFailPoint 2026-01-10T12:33:52.112135Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BlockFailPoint 2026-01-10T12:33:52.112169Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-10T12:33:52.112200Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BlockFailPoint 2026-01-10T12:33:52.112230Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2026-01-10T12:33:52.112254Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-10T12:33:52.143903Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2026-01-10T12:33:52.144351Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-10T12:33:52.144438Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-10T12:33:52.175850Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:33:52.175925Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-10T12:33:52.176713Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-10T12:33:52.419218Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:195: Tablet 9437184 is not ready for [0:11] execution 2026-01-10T12:33:52.420870Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-10T12:33:52.420948Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-10T12:33:52.777159Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:33:52.777239Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-10T12:33:52.778044Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-10T12:33:53.134268Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2026-01-10T12:33:53.136236Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-10T12:33:53.136321Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-10T12:33:53.145188Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-10T12:33:53.145253Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-10T12:33:53.146035Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-10T12:33:54.577833Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2026-01-10T12:33:54.577945Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-10T12:33:54.578018Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-10T12:33:54.578059Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2026-01-10T12:33:54.578095Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit FinishPropose 2026-01-10T12:33:54.578133Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit FinishPropose 2026-01-10T12:33:54.578179Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 6 ms, propose latency: 6 ms, status: COMPLETE 2026-01-10T12:33:54.578250Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: 
Execution status for [0:11] at 9437184 is DelayComplete 2026-01-10T12:33:54.578281Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2026-01-10T12:33:54.578311Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit CompletedOperations 2026-01-10T12:33:54.578342Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2026-01-10T12:33:54.578388Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-10T12:33:54.578414Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2026-01-10T12:33:54.578441Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 9437184 has finished 2026-01-10T12:33:54.583840Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-10T12:33:54.583921Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:11] at 9437184 on unit FinishPropose 2026-01-10T12:33:54.583982Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics 2026-01-10 12:33:55,879 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:33:56,198 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 432849 58.6M 58.5M 32.5M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/003af0/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args 433296 2.0G 2.0G 1.6G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/003af0/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:01.033927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:01.142935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:01.152758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:01.153290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:01.153403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:01.626048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:01.749667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:01.749809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:01.785487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:01.860382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:02.717213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:02.718001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:02.718047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:02.718086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:02.718696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:02.778723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:03.377477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:06.868122Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:06.874817Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:06.877602Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:06.915024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:06.915173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:06.959584Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:06.969624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.183866Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.185513Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.186225Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187023Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187181Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187611Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187710Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.187853Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:07.385363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:07.409099Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:07.409212Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:07.433646Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:07.605878Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:07.657359Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:07.657456Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:07.697697Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:07.700203Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:07.700484Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:07.700567Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:07.700644Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:07.700722Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:07.700786Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:07.700855Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:07.702430Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:07.836141Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.836298Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2208:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:07.853249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2240:2634] 2026-01-10T12:24:07.853533Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2240:2634], schemeshard id = 72075186224037897 2026-01-10T12:24:08.049541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2413:2743] 2026-01-10T12:24:08.049970Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:08.052459Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Describe result: PathErrorUnknown 2026-01-10T12:24:08.052522Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Creating table 2026-01-10T12:24:08.052606Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:08.056889Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2432:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:08.060658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:08.071151Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:08.071335Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:08.101757Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:08.377634Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:08.621325Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:08.759903Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:08.759993Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2415:2745] Owner: [2:2414:2744]. Column diff is empty, finishing 2026-01-10T12:24:09.440148Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 057594046644480 2026-01-10T12:24:36.401063Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:24:36.401163Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:36.401488Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:24:36.416931Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:36.616444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:24:36.621622Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:24:36.625467Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1` 2026-01-10T12:24:36.625647Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], Start read next stream part 2026-01-10T12:24:36.637714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4554:4046], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.637873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4565:4051], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.638556Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.639779Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4570:4056], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.640044Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:36.647424Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4586:4060], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:36.651244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:36.718551Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4568:4054], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:24:36.823562Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4645:4103], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:36.879375Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4644:4102] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:37.341150Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4666:4116]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:37.341470Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:24:37.341577Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4668:4118] 2026-01-10T12:24:37.341644Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4668:4118] 2026-01-10T12:24:37.342082Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4669:4119] 2026-01-10T12:24:37.342194Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4668:4118], server id = [2:4669:4119], tablet id = 72075186224037894, status = OK 2026-01-10T12:24:37.342307Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4669:4119], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:37.342406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:24:37.342534Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:24:37.342619Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4666:4116], StatRequests.size() = 1 2026-01-10T12:24:37.342753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:28:38.788337Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxv6gh17wnmbm8k10ckaqa", SessionId: ydb://session/3?node_id=2&id=ZjZkYWQxNGYtNDNhNTA0MS0xYjA1YTRlLWE4MGQ4NDE1, Slow query, duration: 242.155727s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-10T12:28:38.789984Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:28:38.790403Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], Start read next stream part 2026-01-10T12:28:38.791131Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-10T12:28:38.791452Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4800:4239], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:28:38.794635Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:28:38.794714Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4541:4040], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWE0NmQ3MmUtNGIzZWZmZTgtOTZjOGQxMWYtM2Q0NDUzNGQ=, TxId: 2026-01-10T12:28:38.795786Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4800:4239], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-10T12:28:38.795905Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4531:2472], ActorId: [2:4800:4239], Start read next stream part 2026-01-10T12:28:39.064987Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4826:4261]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:28:39.065370Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:28:39.065438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4826:4261], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003af0/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003af0/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk16/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |96.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics 2026-01-10 12:33:58,620 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:33:58,911 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 433616 58.7M 56.1M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/003ae5/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args 433745 2.1G 2.1G 1.7G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/003ae5/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:03.604668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:03.712147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:03.715080Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:616:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:03.715484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:03.715654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:04.215597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:04.361892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:04.362022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.409818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:04.541469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:05.380810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:05.381836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:05.381896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:05.381955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:05.382512Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:05.471726Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:06.105338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.575117Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.582884Z node 4 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 4 2026-01-10T12:24:09.586996Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:09.632271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:09.632417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.673417Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:24:09.675614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.934015Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:09.934148Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.935741Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.936694Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.937430Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.938308Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.938491Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.938616Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.938820Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.938941Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.939058Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.955704Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:10.180973Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:10.238209Z node 4 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:10.238298Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:10.280914Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:10.281268Z node 4 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:10.281506Z node 4 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:10.281578Z node 4 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:10.281632Z node 4 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:10.281691Z node 4 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:10.281746Z node 4 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:10.281800Z node 4 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:10.282476Z node 4 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:10.283945Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2082:2456] 2026-01-10T12:24:10.294560Z node 4 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-10T12:24:10.301571Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.301685Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [4:2312:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.305387Z node 4 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Describe result: PathErrorUnknown 2026-01-10T12:24:10.305476Z node 4 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Creating table 2026-01-10T12:24:10.305559Z node 4 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-10T12:24:10.333676Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2386:2653] 2026-01-10T12:24:10.334544Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2386:2653], schemeshard id = 72075186224037897 2026-01-10T12:24:10.363524Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [4:2423:2665], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.383972Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.400902Z node 4 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.401038Z node 4 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.417987Z node 4 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:10.519894Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-10T12:24:10.652517Z node 4 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:11.088842Z node 4 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:11.235464Z node 4 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:11.235564Z node 4 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [4:2300:2593] Owner: [4:2299:2592]. Column diff is empty, finishing 2026-01-10T12:24:11.999266Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... 06Z node 4 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [4:7770:5398] 2026-01-10T12:24:48.445494Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:7771:5399] 2026-01-10T12:24:48.445664Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [4:7771:5399], node id = 4, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:48.445753Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-10T12:24:48.445938Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:7770:5398], server id = [4:7771:5399], tablet id = 72075186224037894, status = OK 2026-01-10T12:24:48.446138Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-10T12:24:48.446230Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [4:7767:5395], StatRequests.size() = 1 2026-01-10T12:24:50.257291Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:28:40.393535Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxvhjgasbbwhgxn9rvdm79", SessionId: ydb://session/3?node_id=4&id=ZTlmZWMzYzYtODc3NzkxYmItZDA2MjNjODYtN2EwNTkwYTM=, Slow query, duration: 232.430981s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Serverless1/Table1`", parameters: 0b 2026-01-10T12:28:40.395390Z node 4 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7740:5381], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:28:40.395780Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7740:5381], Start read next stream part 2026-01-10T12:28:40.396455Z node 4 
:KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-10T12:28:40.396575Z node 4 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], Bootstrap. Database: /Root/Serverless1, IsSystemUser: 0, run create session 2026-01-10T12:28:40.399228Z node 4 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7740:5381], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:28:40.399303Z node 4 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7740:5381], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MmU1YTZlYmYtMjQ5YTU4YmUtODcwYzc5MzAtNDY3ZTNhZDU=, TxId: 2026-01-10T12:28:40.400573Z node 4 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Serverless1/Table1` 2026-01-10T12:28:40.400684Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], Start read next stream part 2026-01-10T12:28:40.678814Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:7927:5539]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:28:40.679163Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:28:40.679221Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [4:7927:5539], StatRequests.size() = 1 2026-01-10T12:33:58.389244Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01keky2mjgc64x4ym6dhq9x796", SessionId: ydb://session/3?node_id=4&id=OWY0MWY3ZDItNDg1ODFhODQtMWM4NTgyNWUtZTE0YzkwZDI=, Slow query, duration: 317.984618s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n 
StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Serverless1/Table1`", parameters: 0b 2026-01-10T12:33:58.392861Z node 4 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:33:58.393166Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], Start read next stream part 2026-01-10T12:33:58.394497Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33590, txId: 18446744073709551615] shutting down 2026-01-10T12:33:58.395086Z node 4 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:33:58.395150Z node 4 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [4:7728:2465], ActorId: [4:7909:5524], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTBmNjdmNmQtMWNhZWI2NTAtNjkwNWZjM2QtMjA4YzNhOTM=, TxId: 2026-01-10T12:33:58.395696Z node 4 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [4:8041:5643], ActorId: [4:8044:5646], Starting query actor #1 [4:8047:5648] 2026-01-10T12:33:58.395774Z node 4 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [4:8044:5646], ActorId: [4:8047:5648], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-10T12:33:58.401851Z node 4 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [4:8044:5646], ActorId: [4:8047:5648], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=YjIxYTA4Ny1iYjUwNDFhYS03OTlkZDQzYy04YzgxYzZkYg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-10T12:33:58.493853Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [4:8059:5659]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:33:58.494501Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-10T12:33:58.494711Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2026-01-10T12:33:58.494784Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-10T12:33:58.495128Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-10T12:33:58.495250Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [4:8059:5659], StatRequests.size() = 1 2026-01-10T12:33:58.495478Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003ae5/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003ae5/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics 2026-01-10 
12:33:58,126 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:33:58,685 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 433553 58.6M 58.6M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/d9xi/003aea/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk19/testing_out_stuff/test_tool.args 433713 2.0G 2.0G 1.6G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/d9xi/003aea/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:24:03.300099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:03.410531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:03.416861Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:24:03.417318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:24:03.417417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:24:03.860186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:03.966468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:03.966572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:04.006373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:04.107047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:24:04.836272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:24:04.837009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:24:04.837044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:24:04.837077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:24:04.837568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:24:04.906888Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:24:05.498665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-10T12:24:09.043563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:24:09.051708Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-10T12:24:09.055488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:24:09.099236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-10T12:24:09.099371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.143310Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:24:09.146425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.342334Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.343073Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.343855Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.344808Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.344983Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.345298Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.345481Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.345626Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.345843Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-10T12:24:09.485445Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-10T12:24:09.543064Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:24:09.543200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:24:09.557481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:24:09.748548Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:24:09.799565Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-10T12:24:09.799660Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-10T12:24:09.841114Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-10T12:24:09.842803Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-10T12:24:09.843062Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-10T12:24:09.843149Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-10T12:24:09.843231Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-10T12:24:09.843303Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-10T12:24:09.843375Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-10T12:24:09.843435Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-10T12:24:09.844307Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-10T12:24:09.982188Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:09.982311Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2205:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-10T12:24:10.004408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2634] 2026-01-10T12:24:10.004859Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2634], schemeshard id = 72075186224037897 2026-01-10T12:24:10.224723Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2410:2743] 2026-01-10T12:24:10.225102Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-10T12:24:10.227125Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Describe result: PathErrorUnknown 2026-01-10T12:24:10.227219Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Creating table 2026-01-10T12:24:10.227314Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-10T12:24:10.231347Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2429:2750], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:10.235099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:24:10.242472Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-10T12:24:10.242609Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Subscribe on create table tx: 281474976720657 2026-01-10T12:24:10.260708Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. 
Subscribe on tx: 281474976720657 registered 2026-01-10T12:24:10.527131Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-10T12:24:10.781547Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-10T12:24:10.904820Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-10T12:24:10.904927Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2412:2745] Owner: [2:2411:2744]. Column diff is empty, finishing 2026-01-10T12:24:11.557374Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 94046644480 2026-01-10T12:24:45.778257Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-10T12:24:45.778352Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-10T12:24:45.778742Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-10T12:24:45.803301Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-10T12:24:46.043344Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-10T12:24:46.047755Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:24:46.051522Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1` 2026-01-10T12:24:46.051694Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], Start read next stream part 2026-01-10T12:24:46.062438Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4523:4038], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.062559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4535:4043], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.063125Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.064418Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4540:4048], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.064648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:24:46.070080Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4556:4052], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:46.073385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:24:46.132353Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4538:4046], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-10T12:24:46.350225Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4627:4096], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-10T12:24:46.381937Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4626:4095] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:24:46.718360Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4648:4109]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:24:46.718621Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-10T12:24:46.718705Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4650:4111] 2026-01-10T12:24:46.718753Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4650:4111] 2026-01-10T12:24:46.719195Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4650:4111], server id = [2:4651:4112], tablet id = 72075186224037894, status = OK 2026-01-10T12:24:46.719250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4651:4112] 2026-01-10T12:24:46.719336Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4651:4112], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-10T12:24:46.719414Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-10T12:24:46.719534Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-10T12:24:46.719615Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4648:4109], StatRequests.size() = 1 2026-01-10T12:24:46.719733Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-10T12:28:44.827671Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kekxvfq3fhs7c27npkhzx0n4", SessionId: ydb://session/3?node_id=2&id=M2Y5OWNkZGUtYjFmZTg0ZTQtN2VlNTNiMS0zMjYxYjhlMg==, Slow query, duration: 238.769389s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-10T12:28:44.829551Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:28:44.829966Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], Start read next stream part 2026-01-10T12:28:44.830798Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-10T12:28:44.831112Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4782:4232], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-10T12:28:44.834017Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:28:44.834091Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4511:4032], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjRjYzI3NzMtN2MyODE2NGQtZjAzZTJjOGItNWNhMzg2Mzk=, TxId: 2026-01-10T12:28:44.835129Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4782:4232], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-10T12:28:44.835256Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4501:2470], ActorId: [2:4782:4232], Start read next stream part 2026-01-10T12:28:45.082321Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4808:4254]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-10T12:28:45.082723Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-10T12:28:45.082791Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4808:4254], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003aea/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk19/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/d9xi/003aea/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk19/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink >> data_paging.py::TestDataPaging::test_data_paging_solomon >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] >> basic_reading.py::TestBasicReading::test_basic_reading_solomon >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> 
alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> data_paging.py::TestDataPaging::test_data_paging_solomon [GOOD] >> data_paging.py::TestDataPaging::test_listing_paging_monitoring >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_solomon [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn-useSink >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> 
test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> KqpLimits::TooBigColumn-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29663, MsgBus: 4747 2026-01-10T12:27:03.618211Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593708437515076155:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:03.618251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:27:03.865439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:27:04.000255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593708437515076122:2081] 1768048023617596 != 1768048023617599 2026-01-10T12:27:04.008811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:27:04.008919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:27:04.016505Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:27:04.017647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:27:04.138736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:27:04.172431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:27:04.172454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:27:04.172461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:27:04.172554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:27:04.636195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:27:04.691224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:27:04.703063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:27:04.767924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:04.983967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:05.142864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:05.218544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.180046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708454694947176:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.180210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.180519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708454694947186:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.180590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.533685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.566709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.600554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.637929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.673752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.727965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.759621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.798686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:27:07.887493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708454694948055:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.887621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.887948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708454694948060:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.888060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593708454694948061:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.888145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:27:07.891817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:27:07.902855Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593708454694948064:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:27:07.994323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593708454694948115:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:27:08.618320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593708437515076155:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:27:08.618408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:27:09.615259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... elete /Root Strong=0 2026-01-10T12:34:21.635909Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:34:21.816758Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:34:21.816787Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:34:21.816801Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:34:21.816900Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:34:22.093836Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:34:22.391763Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:34:22.661072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:34:22.681519Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:22.860723Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:23.101692Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:23.190543Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:26.373990Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7593710318621975606:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:26.374078Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:34:28.193856Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593710348686748520:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:28.193968Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:28.194336Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593710348686748530:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:28.194411Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:28.331740Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.400067Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.497364Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.555047Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.621588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.719130Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.808710Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:28.986909Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:29.320278Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593710352981716720:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:29.320398Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:29.320863Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593710352981716725:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:29.320915Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7593710352981716726:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:29.321269Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:29.327178Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:34:29.358397Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7593710352981716729:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:34:29.423545Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7593710352981716782:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:34:35.167248Z node 5 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:186: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2026-01-10T12:34:35.167418Z node 5 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710673 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2026-01-10T12:34:35.171299Z node 5 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [5:7593710374456553626:2559] TxId: 281474976710673. Ctx: { TraceId: 01kekyddygf8s61941jj93gxsx, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YmVlOTZjZDUtYzYyZjE4OWYtNzFjZGIxMDUtYzc1MzgzODI=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; trace_id# 2026-01-10T12:34:35.171841Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=YmVlOTZjZDUtYzYyZjE4OWYtNzFjZGIxMDUtYzc1MzgzODI=, ActorId: [5:7593710370161586296:2559], ActorState: ExecuteState, LegacyTraceId: 01kekyddygf8s61941jj93gxsx, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.9%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] >> KqpBatchDelete::SimpleOnePartition >> listing_paging.py::TestListingPaging::test_listing_paging_solomon >> KqpBatchUpdate::TableNotExists >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> KqpBatchUpdate::TableNotExists [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 26174, MsgBus: 32678 2026-01-10T12:34:50.569357Z node 1 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710443978065122:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:50.569527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:34:50.600614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:34:51.151677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:34:51.151793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:34:51.155050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:34:51.213002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:34:51.252321Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710443978064887:2081] 1768048490499392 != 1768048490499395 2026-01-10T12:34:51.259572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:34:51.471103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:34:51.476317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:34:51.476344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:34:51.476351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:34:51.476428Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:34:51.569991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:34:52.241071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:34:52.267192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:34:54.915914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710461157934965:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.916073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.919468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710461157934977:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.919581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710461157934978:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.919931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.924587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:34:54.940270Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710461157934981:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-10T12:34:55.075295Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710465452902328:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:34:55.387629Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710465452902338:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At function: KiUpdateTable!
:3:34: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:34:55.390379Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Njg2NzA4MWEtZmYyNzZiMGYtYzZhN2ZmMC1hODY4ZjJiYw==, ActorId: [1:7593710461157934961:2323], ActorState: ExecuteState, LegacyTraceId: 01kekye29t94a9ev0zbacxjr14, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 3 column: 34 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 34 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:34:55.452206Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710465452902363:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:4:41: Error: At function: KiUpdateTable!
:4:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:34:55.454357Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Njg2NzA4MWEtZmYyNzZiMGYtYzZhN2ZmMC1hODY4ZjJiYw==, ActorId: [1:7593710461157934961:2323], ActorState: ExecuteState, LegacyTraceId: 01kekye2sa2vastrk5xf0sw308, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 41 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 4 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:34:55.568703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710443978065122:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:55.568905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> KqpBatchDelete::TableWithIndex >> KqpBatchUpdate::ManyPartitions_3 >> KqpBatchUpdate::Returning [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] >> KqpBatchUpdate::MultiStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning 
[GOOD] Test command err: Trying to start YDB, gRPC: 6921, MsgBus: 27822 2026-01-10T12:34:58.171339Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710476969804260:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:58.173435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:34:58.464221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:34:58.477618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:34:58.477733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:34:58.543551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:34:58.602214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:34:58.604192Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710476969804154:2081] 1768048498165580 != 1768048498165583 2026-01-10T12:34:58.713069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:34:58.733255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:34:58.733278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:34:58.733289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:34:58.733389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:34:59.185990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:34:59.186304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:34:59.191524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:34:59.234283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:59.350733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:59.518249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:59.605200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:01.520898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710489854707944:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:01.521008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:01.521510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710489854707954:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:01.521554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:01.966479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.004273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.051128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.086826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.132193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.182908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.268020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.310473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:02.400303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710494149676127:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:02.400396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:02.400625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710494149676132:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:02.400687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710494149676133:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:02.400730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:02.405103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:02.415792Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710494149676136:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:35:02.491676Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710494149676189:3784] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:35:03.170970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710476969804260:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:03.172510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:04.167763Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710502739611083:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2026-01-10T12:35:04.168192Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjM3ODUwMi0xN2RiYzBkMy1lOWZlYmRkOC1hZTEwMDEyMg==, ActorId: [1:7593710502739611074:2531], ActorState: ExecuteState, LegacyTraceId: 01kekyebaj3axfkzwww1vpt99w, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> KqpBatchUpdate::MultiStatement [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 2092, MsgBus: 6903 2026-01-10T12:35:05.878397Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710507722769724:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:05.878521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:35:06.164630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:35:06.199689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:35:06.199806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:35:06.264974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:35:06.336204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710507722769521:2081] 1768048505845388 != 1768048505845391 2026-01-10T12:35:06.336680Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:35:06.362776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:35:06.420093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:35:06.420115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:35:06.420122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:35:06.420203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:35:06.881436Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:35:07.022363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:35:07.102154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:07.279120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:07.444015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:07.529967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.460529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710524902640585:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.460650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.461092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710524902640595:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.461129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.851455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.894855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.939281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.992596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:10.034233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:10.096291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:10.135590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:10.194019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:10.288410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710529197608764:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:10.288550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:10.288842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710529197608770:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:10.288875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710529197608769:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:10.288902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:10.293519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:10.307279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710529197608773:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:35:10.405258Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710529197608824:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:35:10.876223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710507722769724:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:10.876285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:12.110371Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710537787543722:2539], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:35:12.110750Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWQxYmZjM2EtYThjNzJkODYtODVlMDY3MWYtNGFmN2MyMGY=, ActorId: [1:7593710537787543713:2533], ActorState: ExecuteState, LegacyTraceId: 01kekyek0m095y98m01dys2xz1, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 5 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 5 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-10T12:35:12.132641Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710537787543726:2541], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:35:12.134600Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWQxYmZjM2EtYThjNzJkODYtODVlMDY3MWYtNGFmN2MyMGY=, ActorId: [1:7593710537787543713:2533], ActorState: ExecuteState, LegacyTraceId: 01kekyek3pfexn3bpxw0g8j62y, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-10T12:35:12.157992Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710537787543730:2543], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:35:12.159892Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWQxYmZjM2EtYThjNzJkODYtODVlMDY3MWYtNGFmN2MyMGY=, ActorId: [1:7593710537787543713:2533], ActorState: ExecuteState, LegacyTraceId: 01kekyek4df4td8r51m0j5cymg, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-10T12:35:12.179865Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710537787543734:2545], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:35:12.180208Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWQxYmZjM2EtYThjNzJkODYtODVlMDY3MWYtNGFmN2MyMGY=, ActorId: [1:7593710537787543713:2533], ActorState: ExecuteState, LegacyTraceId: 01kekyek568s73m43m1xa3vnhx, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-10T12:35:12.198369Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593710537787543740:2547], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-10T12:35:12.198766Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWQxYmZjM2EtYThjNzJkODYtODVlMDY3MWYtNGFmN2MyMGY=, ActorId: [1:7593710537787543713:2533], ActorState: ExecuteState, LegacyTraceId: 01kekyek5t0tck6gmwzwqbxmnq, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KqpBatchDelete::TableWithIndex [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TA] $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |97.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21100, MsgBus: 1868 2026-01-10T12:35:04.903358Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710505519908740:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:04.903435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:35:04.944040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:35:05.304237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:35:05.312675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:35:05.312772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:35:05.361319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:35:05.423711Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:35:05.429794Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710505519908712:2081] 1768048504900409 != 1768048504900412 2026-01-10T12:35:05.518203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:35:05.546896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:35:05.546919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:35:05.546932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:35:05.547037Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:35:05.918336Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:35:06.175355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:35:06.196522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:35:06.274713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:06.474436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:06.748899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:06.842194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:08.818000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710522699779777:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:08.818147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:08.820489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710522699779787:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:08.820558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.207365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.244260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.281386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.331607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.363062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.405280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.442050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.493146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.575591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526994747956:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.575649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526994747961:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.575672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.575857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526994747964:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.575903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.579266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:09.594976Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710526994747963:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:35:09.662696Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710526994748016:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:35:09.904006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710505519908740:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:09.904114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:11.354150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:11.395783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:11.438467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:13.824350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KqpBatchUpdate::SimpleOnePartition >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [FAIL] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [FAIL] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program |97.0%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> KqpBatchDelete::SimpleOnePartition [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 18855, MsgBus: 2594 2026-01-10T12:34:50.111258Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710446518078489:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:50.111363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:34:50.518535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:34:50.547847Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710446518078256:2081] 1768048490074703 != 1768048490074706 2026-01-10T12:34:50.553972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:34:50.570069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:34:50.570554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:34:50.575231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:34:50.798372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:34:50.802874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:34:50.802895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:34:50.802906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:34:50.802988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:34:51.128280Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:34:51.434235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:34:51.447394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:34:51.578189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:51.804985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:51.991108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:52.128257Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.128912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710463697949322:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.129069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.136304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710463697949333:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.136384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:54.630152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.667690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.707477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.745655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.811453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.847998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.928059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:54.999481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:34:55.076383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710467992917506:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:55.076467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:55.076738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710467992917511:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:55.076866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710467992917512:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:55.076914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:34:55.081399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:34:55.094270Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710467992917515:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:34:55.111536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710446518078489:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:34:55.111624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:34:55.183338Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710467992917570:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10142, MsgBus: 7345 2026-01-10T12:35:00.428842Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:35:21.695720Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:35:21.875635Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:35:21.900274Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593710577148731697:2081] 1768048521656037 != 1768048521656040 2026-01-10T12:35:21.913015Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:35:21.913119Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:35:21.917248Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:35:21.997065Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:35:22.041347Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:35:22.041376Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:35:22.041385Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:35:22.041470Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:35:22.048276Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:35:22.084494Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-10T12:35:22.180057Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:35:22.688409Z node 4 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:35:22.854423Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:35:22.892468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:35:22.907472Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:23.002564Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:23.239472Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:23.378798Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:26.688487Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593710577148731917:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:26.688571Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:27.074437Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710602918537349:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.074531Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.074777Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710602918537358:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.074814Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.270805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.360714Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.431783Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.489016Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.558217Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.620819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.686918Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.779523Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:27.896008Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710602918538235:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.896128Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.896621Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710602918538240:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.896697Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710602918538241:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.896983Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.901850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:27.921153Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593710602918538244:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:35:27.983223Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593710602918538297:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [FAIL] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> 
settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> KqpBatchUpdate::SimpleOnePartition [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 19156, MsgBus: 6606 2026-01-10T12:35:21.853058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710579315434005:2234];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:21.853122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:35:21.878775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:35:22.395537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:35:22.395663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:35:22.396379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:35:22.440453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:35:22.637558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:35:22.652347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710579315433797:2081] 1768048521790969 != 1768048521790972 2026-01-10T12:35:22.848351Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:35:22.896225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:35:22.917018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:35:22.917047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:35:22.917055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:35:22.917182Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-10T12:35:23.723798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:35:23.756056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:35:23.821661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:24.045553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:24.300065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:24.443457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:26.844216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710579315434005:2234];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:26.844334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:27.613220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710605085239455:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.613360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.614044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710605085239465:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:27.614136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.166282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.236800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.297964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.342631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.394197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.444276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.513957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.644037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:28.877645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710609380207637:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.877733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.878081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710609380207642:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.878111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710609380207643:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.878229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:28.882738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:28.917224Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710609380207646:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:35:29.017614Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710613675174997:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... 94046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:36:00.303480Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7593710743747059094:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:36:00.394002Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7593710743747059159:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8726, MsgBus: 15608 2026-01-10T12:36:09.435505Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:36:09.456204Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:36:09.543976Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:36:09.544068Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:36:09.547353Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:36:09.564148Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7593710784183188122:2081] 1768048569345552 != 1768048569345555 2026-01-10T12:36:09.566013Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:36:09.680237Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:36:09.820069Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:36:09.828426Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:36:09.828470Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:36:09.828634Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:36:10.368226Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:36:10.534715Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:36:10.551821Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:36:10.568336Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:10.675708Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:10.937132Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:11.019811Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.241457Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710809952993776:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.241562Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.242010Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710809952993786:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.242063Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.361494Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.418650Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.475676Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.522967Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.580879Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.633923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.703488Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.789797Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:36:15.916582Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710809952994667:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.916705Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.917038Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710809952994672:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.917086Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593710809952994673:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.917193Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:36:15.921975Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:36:15.941044Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593710809952994676:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-10T12:36:16.032660Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593710814247962026:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [FAIL] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |97.1%| 
[TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |97.1%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] |97.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |97.1%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_solomon [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> alter_compression.py::TestAlterCompression::test_availability_data >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.1%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |97.2%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |97.2%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> TKesusTest::TestUnregisterProxy |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> YdbSdkSessions::TestSessionPool >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessions::TestMultipleSessions >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> TOlap::StoreStats >> YdbSdkSessions::TestSessionPool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is 
failing right now |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] >> YdbSdkSessions::MultiThreadSync |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> 
YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> 
TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2026-01-10T12:37:55.550565Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:37:55.550719Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:37:55.570771Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:37:55.570964Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:37:55.616801Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:37:56.109323Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:37:56.109454Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:37:56.133408Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:37:56.134151Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:37:56.171128Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:37:56.597366Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:37:56.597462Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:37:56.612118Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:37:56.612256Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:37:56.647106Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:37:56.647641Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=8798487715632610898, session=0, seqNo=0) 2026-01-10T12:37:56.647807Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-10T12:37:56.660369Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=8798487715632610898, session=1) 2026-01-10T12:37:56.660942Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:146:2168], cookie=8206625822647320510) 2026-01-10T12:37:56.661039Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:146:2168], cookie=8206625822647320510) 2026-01-10T12:37:57.124501Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:57.144845Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:57.514834Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:57.533061Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:57.893557Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:57.908046Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:58.277517Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:58.296884Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:58.704415Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:58.719587Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:59.087496Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:59.103939Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:59.470911Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:59.485981Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:37:59.868492Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:37:59.888767Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:00.280554Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:00.294349Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:00.704927Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:00.717344Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:01.137124Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:01.149681Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:01.525092Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:01.540874Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:01.911942Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:01.926356Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:02.334985Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:02.353224Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:02.792972Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:02.807433Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:03.201811Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-01-10T12:38:03.214362Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:03.592159Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:03.605263Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:03.972785Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:03.994620Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:04.401411Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:04.417217Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:04.812051Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:04.825311Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:05.212438Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:05.228863Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:05.615389Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:05.627894Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:06.010199Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:06.029011Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:06.389794Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:06.409209Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:06.809388Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:06.824704Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:07.221225Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:07.237327Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:07.620473Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:07.637075Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:08.010110Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:08.029321Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:08.406706Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:08.421200Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:08.840487Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:08.854052Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:09.248171Z node 3 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:09.265604Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:09.703276Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:09.715824Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:10.107325Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:10.119686Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:10.480411Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:10.496952Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:10.896772Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:10.911277Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:11.295969Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:11.313815Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:11.705167Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:11.719177Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:12.093517Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:12.108756Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:12.488543Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:12.504948Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:12.916724Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:12.929414Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:13.300462Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:13.312475Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:13.710156Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:13.724964Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:14.100502Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:14.120749Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:14.532387Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:14.546501Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:15.001024Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:15.014303Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-10T12:38:15.406289Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:15.419375Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:15.794708Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:15.812369Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:16.188451Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:16.200668Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:16.573517Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:16.589298Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:17.010632Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:17.033215Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:17.400413Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:17.414046Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:17.793080Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:17.809295Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:18.181257Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:18.196899Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:18.540346Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:18.553056Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:18.944660Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:18.958758Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:19.336216Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:19.354563Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:19.738844Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:19.757249Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:20.131720Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:20.151664Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:20.508494Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-10T12:38:20.523014Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-10T12:38:21.008678Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 
2026-01-10T12:38:21.008943Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-10T12:38:21.025719Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-10T12:38:21.041973Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:535:2482], cookie=6783374611023717117) 2026-01-10T12:38:21.042100Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:535:2482], cookie=6783374611023717117) 2026-01-10T12:38:21.595154Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-10T12:38:21.595270Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-10T12:38:21.615257Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-10T12:38:21.615394Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-10T12:38:21.630604Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-10T12:38:21.640809Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=9945835385409234352, path="Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-10T12:38:21.641105Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-10T12:38:21.663936Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=9945835385409234352) 2026-01-10T12:38:21.665865Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:146:2168]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:38:21.665936Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:146:2168], cookie=0) 2026-01-10T12:38:21.666155Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-10T12:38:21.666189Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=0) 2026-01-10T12:38:21.728043Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-10T12:38:21.728186Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-10T12:38:21.728498Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:151:2173]) 2026-01-10T12:38:21.728693Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2026-01-10T12:38:21.785967Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |97.3%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support |97.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> TOlap::StoreStats [GOOD] >> TOlap::Decimal |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> TOlap::Decimal [GOOD] >> TOlap::MoveTableStats >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TOlap::MoveTableStats [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::MoveTableStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:38:10.218357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:38:10.218429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:38:10.218468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:38:10.218509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:38:10.218549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:38:10.218569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:38:10.218617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:38:10.218695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:38:10.219486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:38:10.219717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:38:10.297149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:38:10.297212Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:38:10.312239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:38:10.312326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:38:10.312501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:38:10.319696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:38:10.319934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with 
owners number: 0 2026-01-10T12:38:10.320699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:38:10.320958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:38:10.323377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:38:10.323570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:38:10.325085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:38:10.325149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:38:10.325221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:38:10.325268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:38:10.325312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:38:10.325667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:38:10.467729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 
281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.468983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.469020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.469052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.469115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.469158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:38:10.469199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... CHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#203:0 progress is 1/1 2026-01-10T12:38:40.891693Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-10T12:38:40.891738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 203, ready parts: 1/1, is published: true 2026-01-10T12:38:40.891824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:863:2822] message: TxId: 203 2026-01-10T12:38:40.891896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-10T12:38:40.891958Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 203:0 2026-01-10T12:38:40.892002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 203:0 2026-01-10T12:38:40.892206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-10T12:38:40.892263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-10T12:38:40.892729Z node 3 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186233409546 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 203 2026-01-10T12:38:40.892936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-10T12:38:40.892997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-10T12:38:40.893086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-10T12:38:40.897156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 203: got EvNotifyTxCompletionResult 2026-01-10T12:38:40.897246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 203: satisfy waiter [3:1038:2987] 2026-01-10T12:38:40.897373Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 203 2026-01-10T12:38:40.898204Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:38:40.898431Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 256us result status StatusPathDoesNotExist 2026-01-10T12:38:40.898633Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:38:40.899518Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 41 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-10T12:38:40.899822Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 41 took 310us result status StatusSuccess 2026-01-10T12:38:40.900876Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 219 LastUpdateTime: 219 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:38:40.934070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 1225216 rowCount 100000 cpuUsage 0 2026-01-10T12:38:40.934268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:279: PersistSingleStats for pathId [OwnerId: 72057594046678944, LocalPathId: 39], tabletId 72075186233409546, followerId 0: unknown pathId 2026-01-10T12:38:40.944692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-10T12:38:41.269016Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MovedColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-10T12:38:41.269430Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/MovedColumnTable" took 439us result status StatusSuccess 2026-01-10T12:38:41.270106Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 219 LastUpdateTime: 219 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |97.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> 
test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] |97.4%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |97.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> 
alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpBatchUpdate::ManyPartitions_3 [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 4297, MsgBus: 29135 2026-01-10T12:35:04.980114Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593710504918595641:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:04.980171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:35:05.394358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:35:05.394463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:35:05.441948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:35:05.459653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:35:05.468836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:35:05.472801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593710504918595485:2081] 1768048504957888 != 1768048504957891 2026-01-10T12:35:05.613728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:35:05.613755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:35:05.613768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:35:05.613883Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:35:05.688759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-10T12:35:06.032286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:35:06.243316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:35:06.256877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:35:06.351153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:06.531377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:06.909628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:07.028378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.068540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526393433840:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.068669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.069078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526393433850:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.069131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.407062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.452142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.489961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.550231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.583064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.624446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.668868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.724325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:35:09.826060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526393434716:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.826172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.826683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526393434721:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.826758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593710526393434722:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.826797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:35:09.831510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:35:09.849966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593710526393434725:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:35:09.918909Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593710526393434776:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:35:09.984196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593710504918595641:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:35:09.987357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:35:11.741403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... de 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:39:15.826231Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:39:15.938960Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:39:15.948208Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7593711580592807119:2081] 1768048755800079 != 1768048755800082 2026-01-10T12:39:15.987950Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:39:15.988289Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:39:16.020192Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:39:16.050288Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:39:16.123249Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-10T12:39:16.123286Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-10T12:39:16.123299Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-10T12:39:16.123424Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:39:16.813756Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:39:17.025523Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:39:17.053145Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:17.160645Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:17.446191Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:17.575277Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:20.808232Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7593711580592807147:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:39:20.808357Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:39:21.879951Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593711606362612782:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:21.880100Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:21.880550Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593711606362612792:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:21.880635Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.018342Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.079283Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.148627Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.227125Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.293529Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.373534Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.445371Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.559154Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:22.728943Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593711610657580975:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.729110Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.729830Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593711610657580980:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.729904Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7593711610657580981:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.730381Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:39:22.736962Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:39:22.767711Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7593711610657580984:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-10T12:39:22.861537Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7593711610657581049:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:39:25.993026Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:39:30.896807Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:39:30.896847Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |97.6%| [TA] $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/002068/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit_log.f3520al_.txt 2026-01-10T12:39:37.766207Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> TTenantPoolTests::TestStateStatic |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TTenantPoolTests::TestStateStatic [GOOD] >> test_canonical_records.py::test_topic |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |97.7%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/002031/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log.ptie4rjn.txt 2026-01-10T12:39:40.311880Z: {"tx_id":"01kekyps0q3xm3hxk0hrvx1eng","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:39:40.311843Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-10T12:39:40.311142Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2026-01-10T12:39:40.464931Z: {"tx_id":"01kekyps0q3xm3hxk0hrvx1eng","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:39:40.464890Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:39:40.318934Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:39:40.479920Z: {"tx_id":"01kekyps0q3xm3hxk0hrvx1eng","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:39:40.479877Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-10T12:39:40.471990Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> TNetClassifierTest::TestInitFromFile |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TNetClassifierTest::TestInitFromFile [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2026-01-10T12:40:03.524125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593711790384915019:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:40:03.524177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:40:03.555255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:40:03.835769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:40:03.835885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:40:03.838853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:40:03.926086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:40:03.934897Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:40:03.943311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002986/r3tmp/yandexe3k1Yp.tmp 2026-01-10T12:40:03.943333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002986/r3tmp/yandexe3k1Yp.tmp 2026-01-10T12:40:03.943687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/d9xi/002986/r3tmp/yandexe3k1Yp.tmp 2026-01-10T12:40:03.943785Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:40:04.133441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:40:04.534904Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |97.7%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f58/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit_log.3nxz04w1.txt 2026-01-10T12:40:07.143738Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:40:07.143691Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:40:06.930980Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f5e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk22/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit_log.1he3mv3c.txt 2026-01-10T12:40:03.216605Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:40:03.216559Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-10T12:40:03.136393Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f4f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.vlp7rfm3.txt 2026-01-10T12:40:10.163439Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:40:10.163396Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-10T12:40:09.999524Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001f47/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit_log.s6o547v4.txt |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> 
test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::StressTestSync/0 [FAIL] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [FAIL] Test command err: ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values: Client->GetCurrentPoolSize() Which is: 0 activeSessionsLimit Which is: 1 |97.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/d9xi/001f30/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit_log.pk2p5lli.txt 2026-01-10T12:40:22.577440Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:40:22.577394Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-10T12:40:22.371229Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::StressTestAsync/0 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> 
test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL 
= 17] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] >> test_canonical_records.py::test_replace_config |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TA] $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |98.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] 
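The audit-log excerpts above (for example the test_auditlog.py::test_single_dml_query_logged[delete] and test_cloud_ids_are_logged[attrs1] outputs) show the file format these tests inspect: the record lines of the captured audit_log.*.txt consist of an ISO-8601 timestamp, a colon-space separator, and one compact JSON object with fields such as component, operation, subject, status and query_text. The sketch below is a hypothetical helper, not the test suite's actual code; it only demonstrates how a line in that format can be split and its fields checked, using an abridged record copied from the delete excerpt above.

```python
import json

def parse_audit_line(line: str):
    """Split '<ISO-8601 timestamp>: {json...}' into (timestamp, record dict)."""
    # The first ': ' (colon + space) separates the timestamp from the JSON payload.
    timestamp, _, payload = line.partition(": ")
    return timestamp, json.loads(payload)

# Abridged record taken from the log excerpt above (field subset, same values).
sample = ('2026-01-10T12:40:22.577440Z: {"database":"/Root/test_auditlog.py",'
          '"status":"SUCCESS","detailed_status":"SUCCESS",'
          '"operation":"ExecuteDataQueryRequest","subject":"root@builtin",'
          '"component":"grpc-proxy","query_text":"delete from '
          '`/Root/test_auditlog.py/test-table` where id = 100 or id = 101"}')

ts, record = parse_audit_line(sample)
assert record["status"] == "SUCCESS"
assert record["component"] == "grpc-proxy"
assert record["query_text"].startswith("delete from")
print(ts, record["operation"])
```

The same split applies to the REPLACE DYNCONFIG records further down, whose new_config field carries an embedded YAML document as a JSON string.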
|98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001ecd/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit_log.333422yg.txt 2026-01-10T12:40:59.373784Z: {"commit_tx":"1","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","cloud_id":"cloud-id-A","begin_tx":"1","resource_id":"database-id-C","end_time":"2026-01-10T12:40:59.373727Z","tx_id":"{none}","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:40:59.192825Z","database":"/Root/test_auditlog.py","subject":"root@builtin","status":"SUCCESS","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","component":"grpc-proxy","sanitized_token":"**** (B6C6F477)","detailed_status":"SUCCESS","remote_address":"127.0.0.1"} |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001ea1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.ml7qksz8.txt 2026-01-10T12:41:04.397534Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| 
[TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001eb1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit_log.hzrpbc2g.txt 2026-01-10T12:41:05.240564Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.240519Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-10T12:41:05.222244Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:41:05.391810Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.391773Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-10T12:41:05.352159Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:41:05.520030Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.520002Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-10T12:41:05.502617Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:41:05.656515Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.656480Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-10T12:41:05.631925Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:41:05.792714Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.792680Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-10T12:41:05.769626Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-10T12:41:05.939027Z: 
{"database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:05.938996Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-10T12:41:05.906415Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001e4a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.fhd2kgbt.txt 2026-01-10T12:41:18.872927Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_restarts.py::test_basic >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_canonical_records.py::test_dml >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001e39/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit_log._derr4nz.txt 2026-01-10T12:41:19.573864Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:19.573821Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-10T12:41:19.489535Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test 
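(The `Test command err` blocks above print each captured audit record as an ISO-8601 timestamp followed by a JSON object, e.g. the `REPLACE DYNCONFIG` and `ExecuteDataQueryRequest` records. As a minimal illustration only — not part of the test suite — the sketch below shows one way such a line could be split back into a timestamp and a parsed record; the helper name `parse_audit_line` and the shortened sample record are hypothetical, assuming the "timestamp: {json}" layout seen in this output.)

import json
from datetime import datetime

def parse_audit_line(line: str):
    # Hypothetical helper: split the "ISO-8601Z: {...}" layout shown in the test output
    # into a datetime and a parsed JSON record.
    ts_text, _, payload = line.partition(": ")
    ts = datetime.strptime(ts_text, "%Y-%m-%dT%H:%M:%S.%fZ")
    return ts, json.loads(payload)

# Example with the record shape seen in the log (fields shortened for brevity).
sample = '2026-01-10T12:41:19.573864Z: {"status":"SUCCESS","component":"grpc-proxy","operation":"ExecuteDataQueryRequest"}'
ts, record = parse_audit_line(sample)
print(ts.isoformat(), record["operation"], record["status"])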
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_canonical_records.py::test_dml_through_http |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_canonical_records.py::test_execute_minikql |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test 
|98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001de6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit_log.ip_9puur.txt 2026-01-10T12:41:32.210398Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| 
[TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test 
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_canonical_records.py::test_restart_pdisk |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001d69/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log._ws2hu0m.txt 2026-01-10T12:41:44.528048Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001d67/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit_log.v21iqklo.txt 2026-01-10T12:41:44.746933Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: 
MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_select.py::TestPgSelect::test_as_table |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_as_table |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_auditlog.py::test_dynconfig [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options >> YdbSdkSessionsPool1Session::GetSession/0 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_canonical_records.py::test_replace_config [GOOD] >> YdbSdkSessionsPool1Session::CustomPlan/0 >> YdbSdkSessionsPool::PeriodicTask/0 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> YdbSdkSessionsPool::StressTestSync/1 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001d09/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.yenuscen.txt 2026-01-10T12:41:51.047642Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-10T12:41:51.047602Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 
101)","start_time":"2026-01-10T12:41:50.975102Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_select.py::TestPgSelect::test_as_table [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] >> YdbSdkSessionsPool1Session::FailTest/0 >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> YdbSdkSessionsPool::WaitQueue/0 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_select.py::TestSelect::test_as_table [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |98.8%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001c83/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit_log.fqa5ym5k.txt 2026-01-10T12:42:07.877868Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_as_table [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/d9xi/001c87/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_dynconfig/audit_log.8hjohlic.txt 2026-01-10T12:42:05.322564Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_as_table [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_simple.py::TestSimple::test[alter_table] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_canonical_records.py::test_dml [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |98.8%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] >> test_canonical_records.py::test_execute_minikql [GOOD] >> test_query_cache.py::TestQueryCache::test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> LongTxService::BasicTransactions |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> ConfigGRPCService::ReplaceConfig >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> Splitter::Simple >> Splitter::Simple [GOOD] >> Splitter::Small >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall 
|98.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.9%| [TS] {RESULT} ydb/tests/library/ut/py3test |98.9%| [TS] {BAZEL_UPLOAD} ydb/tests/library/ut/py3test >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> DataShardBackgroundCompaction::ShouldCompact |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_1 [GOOD] >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig |98.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/raw_socket/ut/unittest >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] |98.9%| [TS] {RESULT} ydb/core/raw_socket/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/raw_socket/ut/unittest >> LongTxService::LockSubscribe [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2026-01-10T12:42:56.025497Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:42:56.026154Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmp9wYcyN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:42:56.027522Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmp9wYcyN/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmp9wYcyN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15090304240580259784 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:42:56.132351Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 1] Received TEvBeginTx from [1:438:2328] 2026-01-10T12:42:56.132497Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:123: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.149168Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:439:2101] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.149300Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:102:2048] 2026-01-10T12:42:56.149475Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:153:2090] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.149740Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:439:2101] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.149928Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 1] Received TEvCommitTx from [2:153:2090] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.149985Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:162: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 without side-effects 2026-01-10T12:42:56.150246Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:439:2101] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.150448Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] 
Received TEvRollbackTx from [2:153:2090] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.151761Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:439:2101] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.151941Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:153:2090] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=1 2026-01-10T12:42:56.153035Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-10T12:42:56.153132Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-10T12:42:56.153257Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-10T12:42:56.153531Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:102:2048] 2026-01-10T12:42:56.154291Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:360:2277] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2026-01-10T12:42:56.154475Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:151:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-10T12:42:56.158845Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:439:2101] LongTxId# ydb://long-tx/000000001h3903h3n27hy7rzs4?node_id=3 2026-01-10T12:42:56.159102Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:477:2103] 2026-01-10T12:42:57.014480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:42:57.014543Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:42:57.344916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:42:58.170501Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:42:58.171012Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpNzPDGU/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:42:58.171487Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpNzPDGU/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpNzPDGU/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2122875387434730394 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:42:58.720470Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:346: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:623:2495] for database /dc-1 2026-01-10T12:42:58.720555Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.731219Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.731424Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:821:2678] Sending navigate request for /dc-1 2026-01-10T12:42:58.743616Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:821:2678] Received navigate response status Ok 2026-01-10T12:42:58.743698Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:821:2678] Sending acquire step to coordinator 72057594046316545 2026-01-10T12:42:58.744200Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:821:2678] Received read step 1500 2026-01-10T12:42:58.744316Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2026-01-10T12:42:58.744477Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:623:2495] 2026-01-10T12:42:58.744521Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.754917Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.755096Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: 
LongTxService.AcquireSnapshot [3:834:2685] Sending navigate request for /dc-1 2026-01-10T12:42:58.755395Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:834:2685] Received navigate response status Ok 2026-01-10T12:42:58.755432Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:834:2685] Sending acquire step to coordinator 72057594046316545 2026-01-10T12:42:58.755578Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:834:2685] Received read step 1500 2026-01-10T12:42:58.755640Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2026-01-10T12:42:58.755674Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:425: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2026-01-10T12:42:58.755972Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:623:2495] 2026-01-10T12:42:58.756008Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.766606Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-10T12:42:58.767032Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:836:2687] Sending navigate request for /dc-1 2026-01-10T12:42:58.767264Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:836:2687] Received navigate response status Ok 2026-01-10T12:42:58.767538Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:836:2687] Sending acquire step to coordinator 72057594046316545 2026-01-10T12:42:58.767738Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:836:2687] Received read step 1500 2026-01-10T12:42:58.768045Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2026-01-10T12:42:58.768153Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:423: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ed4r84c4433k0tbc2q?node_id=3&snapshot=1500%3Amax 2026-01-10T12:42:59.771099Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-10T12:42:59.771556Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpZCpOe8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-10T12:42:59.771774Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpZCpOe8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/d9xi/002791/r3tmp/tmpZCpOe8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9883319834479740079 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-10T12:42:59.831792Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:468: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2026-01-10T12:42:59.831907Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:441:2331] for LockId# 987 LockNode# 5 2026-01-10T12:42:59.852542Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:442:2101] for LockId# 987 LockNode# 5 2026-01-10T12:42:59.852707Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:101:2048] 2026-01-10T12:42:59.856387Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 987 LockNode# 5 2026-01-10T12:42:59.857884Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-10T12:42:59.858104Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:441:2331] for LockId# 123 LockNode# 5 2026-01-10T12:42:59.858262Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:442:2101] for LockId# 123 LockNode# 5 2026-01-10T12:42:59.862400Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 123 LockNode# 5 2026-01-10T12:42:59.862627Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2026-01-10T12:42:59.862790Z node 5 :LONG_TX_SERVICE DEBUG: 
long_tx_service_impl.cpp:479: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2026-01-10T12:42:59.862946Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-10T12:42:59.863114Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:442:2101] for LockId# 234 LockNode# 5 2026-01-10T12:42:59.863402Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 6 2026-01-10T12:42:59.863640Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:101:2048] 2026-01-10T12:42:59.863795Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-10T12:42:59.863893Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-10T12:42:59.864196Z node 6 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [6:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-10T12:42:59.864673Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:77:2077] ServerId# [5:365:2282] TabletId# 72057594037932033 PipeClientId# [6:77:2077] 2026-01-10T12:43:00.112515Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:473:2048] 2026-01-10T12:43:00.112780Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-10T12:43:00.112829Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-10T12:43:00.113294Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:473:2048] 2026-01-10T12:43:00.113508Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:474:2102] ServerId# [5:478:2351] TabletId# 72057594037932033 PipeClientId# [6:474:2102] 2026-01-10T12:43:00.406922Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:492:2048] 2026-01-10T12:43:00.407197Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-10T12:43:00.407246Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-10T12:43:00.407881Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:492:2048] 2026-01-10T12:43:00.408258Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:493:2103] ServerId# [5:497:2361] TabletId# 72057594037932033 PipeClientId# [6:493:2103] 2026-01-10T12:43:00.727123Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:514:2048] 2026-01-10T12:43:00.727419Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-10T12:43:00.727468Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] 
NodeDisconnected NodeId# 6 2026-01-10T12:43:00.727912Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:514:2048] 2026-01-10T12:43:00.728226Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:516:2105] ServerId# [5:520:2375] TabletId# 72057594037932033 PipeClientId# [6:516:2105] |98.9%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/long_tx_service/ut/unittest >> test_query_cache.py::TestQueryCache::test [GOOD] >> BulkUpsert::BulkUpsert >> test_canonical_records.py::test_restart_pdisk [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> ServerRestartTest::RestartOnGetSession >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TWriteQuoterTests::WaitDeduplicationIdQuota |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig [GOOD] >> ConfigGRPCService::FetchConfig >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> Splitter::Crit [GOOD] >> Splitter::CritSimple >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> Coordinator::ReadStepSubscribe |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> ExportS3BufferTest::MinBufferSize [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompression [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] |98.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_export/unittest >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] |98.9%| [TS] {RESULT} ydb/core/tx/datashard/ut_export/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_export/unittest >> test_example.py::TestExample::test_example >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> TDataShardRSTest::TestCleanupInRS+UseSink >> test_canonical_requests.py::Test::test_essential |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] |98.9%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> KqpTpch::Query01 >> ReadUpdateWrite::Load >> ConfigGRPCService::FetchConfig [GOOD] >> ConfigGRPCService::CheckV1IsBlocked >> Splitter::CritSimple [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; |98.9%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/columnshard/splitter/ut/unittest >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> ConfigGRPCService::CheckV1IsBlocked [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> ServerRestartTest::RestartOnGetSession [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::CheckV1IsBlocked [GOOD] Test command err: 2026-01-10T12:42:55.452228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593712527821113273:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:42:55.452357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:42:55.803648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:42:55.995192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:42:55.995363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:42:56.073550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:42:56.118484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:42:56.120661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:42:56.226512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710685:0, at schemeshard: 72057594046644480 2026-01-10T12:42:56.226579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710685:0 ProgressState 2026-01-10T12:42:56.226672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710685:0 progress is 1/1 2026-01-10T12:42:56.226685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710685 ready parts: 1/1 2026-01-10T12:42:56.226704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710685:0 progress is 1/1 2026-01-10T12:42:56.226733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710685 ready parts: 1/1 2026-01-10T12:42:56.226754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710685, ready parts: 1/1, is published: false 2026-01-10T12:42:56.226774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: 
TOperation IsReadyToDone TxId: 281474976710685 ready parts: 1/1 2026-01-10T12:42:56.226790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710685:0 2026-01-10T12:42:56.226809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710685:0 2026-01-10T12:42:56.226911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 30] was 2 2026-01-10T12:42:56.226935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710685, publications: 2, subscribers: 1 2026-01-10T12:42:56.226947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710685, [OwnerId: 72057594046644480, LocalPathId: 2], 38 2026-01-10T12:42:56.226954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710685, [OwnerId: 72057594046644480, LocalPathId: 30], 2 2026-01-10T12:42:56.227302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2026-01-10T12:42:56.227332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710674:0 ProgressState 2026-01-10T12:42:56.227376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710674:0 progress is 1/1 2026-01-10T12:42:56.227384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-01-10T12:42:56.227395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710674:0 progress is 1/1 2026-01-10T12:42:56.227403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-01-10T12:42:56.227413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: false 2026-01-10T12:42:56.227456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-01-10T12:42:56.227471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710674:0 2026-01-10T12:42:56.227477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710674:0 2026-01-10T12:42:56.227503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 2 2026-01-10T12:42:56.227518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710674, publications: 2, subscribers: 1 2026-01-10T12:42:56.227529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710674, [OwnerId: 72057594046644480, LocalPathId: 2], 38 2026-01-10T12:42:56.227536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: 
Publication details: tx: 281474976710674, [OwnerId: 72057594046644480, LocalPathId: 19], 2 2026-01-10T12:42:56.227720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710663:0, at schemeshard: 72057594046644480 2026-01-10T12:42:56.227734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710663:0 ProgressState 2026-01-10T12:42:56.227769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710663:0 progress is 1/1 2026-01-10T12:42:56.227775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2026-01-10T12:42:56.227785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710663:0 progress is 1/1 2026-01-10T12:42:56.227791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2026-01-10T12:42:56.227799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710663, ready parts: 1/1, is published: false 2026-01-10T12:42:56.227810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2026-01-10T12:42:56.227818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710663:0 2026-01-10T12:42:56.227824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710663:0 2026-01-10T12:42:56.227864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 2 2026-01-10T12:42:56.227877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710663, publications: 2, subscribers: 1 2026-01-10T12:42:56.227884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710663, [OwnerId: 72057594046644480, LocalPathId: 2], 38 2026-01-10T12:42:56.227891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710663, [OwnerId: 72057594046644480, LocalPathId: 8], 2 2026-01-10T12:42:56.228048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710691:0, at schemeshard: 72057594046644480 2026-01-10T12:42:56.228062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710691:0 ProgressState 2026-01-10T12:42:56.228111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710691:0 progress is 1/1 2026-01-10T12:42:56.228118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710691 ready parts: 1/1 2026-01-10T12:42:56.228159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710691:0 progress is 1/1 2026-01-10T12:42:56.228158Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 
2026-01-10T12:42:56.228166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710691 ready parts: 1/1 2026-01-10T12:42:56.228175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710691, ready parts: 1/1, is published: false 2026-01-10T12:42:56.228193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710691 ready parts: 1/1 2026-01-10T12:42:56.228201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710691:0 2026-01-10T12:42:56.228213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710691:0 2026-01-10T12:42:56.228236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 36] was 2 2026-01-10T12:42:56.228243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710691, publications: 2, subscribers: 1 2026-01-10T12:42:56.228250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710691, [OwnerId: 72057594046644480, LocalPathId: 2], 38 2026-01-10T12:42:56.228286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710691, [OwnerId: 72057594046644480, LocalPathId: 36], 2 2026-01-10T12:42:56.228450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2026-01-10T12:42:56.228464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710680:0 ProgressState 2026-01-10T12:42:56.228503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part ope ... 
657 2026-01-10T12:43:11.489361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2026-01-10T12:43:11.489378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:43:11.489454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-10T12:43:11.490187Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T12:43:11.490283Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T12:43:11.490297Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-10T12:43:11.490340Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-10T12:43:11.491947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7593712596762222960:2305] 2026-01-10T12:43:11.494149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2026-01-10T12:43:11.498519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:43:11.540141Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01kekyx79k1svb9hrd9pvjbfj1, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57210, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T12:43:11.601557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-10T12:43:11.602092Z node 1 :BS_CONTROLLER DEBUG: {BSC24@console_interaction.cpp:224} Console replace config request Request# {ClusterYaml: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" DedicatedConfigMode: false AllowUnknownFields: false BypassMetadataChecks: false PeerName: "ipv6:[::1]:57210" UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-10T12:43:11.603875Z node 1 :BS_CONTROLLER DEBUG: {BSC36@console_interaction.cpp:407} Sending TEvControllerValidateConfigRequest to console ConsolePipe# [1:7593712596762222439:2466] 2026-01-10T12:43:11.625774Z node 1 :BS_CONTROLLER DEBUG: {BSC27@console_interaction.cpp:478} Console validate config response Response# {Status: ConfigIsValid YAML: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\n\nconfig:\n 
host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" } 2026-01-10T12:43:11.634697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } Rollback: true StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-10T12:43:11.635050Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-10T12:43:11.635074Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-10T12:43:11.635092Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-10T12:43:11.635164Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-10T12:43:11.635174Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 2026-01-10T12:43:11.635183Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-10T12:43:11.635192Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-10T12:43:11.645678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 5670699008 State: Normal SlotSizeInUnits: 0 PDiskUsage: 0.098231827111984277 } } 2026-01-10T12:43:11.646298Z node 1 :BS_CONTROLLER DEBUG: {BSC14@bsc.cpp:624} ApplyStorageConfig Request# {Request { Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 
\000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } } 2026-01-10T12:43:11.648064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-10T12:43:11.652958Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-10T12:43:11.652992Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-10T12:43:11.653009Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-10T12:43:11.653081Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-10T12:43:11.653090Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 2026-01-10T12:43:11.653098Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-10T12:43:11.653106Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-10T12:43:11.667042Z node 1 :BS_CONTROLLER ERROR: {BSC15@bsc.cpp:687} TEvControllerConfigResponse Response# {Status { Success: true } Status { Success: true } Status { Success: true } ErrorDescription: "failed to remove PDisk# 1:1 as it has active VSlots" ConfigTxSeqNo: 4 } 2026-01-10T12:43:11.668036Z node 1 :BS_CONTROLLER DEBUG: {BSC20@console_interaction.cpp:199} Console commit config response Response# {Status: Committed } 2026-01-10T12:43:11.684384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: FeatureFlags { SwitchToConfigV2: true } QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2026-01-10T12:43:11.684418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:11.688515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# ObjectStorage 2026-01-10T12:43:11.688576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop response: ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Config.ReplaceConfigResult] { } } 2026-01-10T12:43:11.704198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:11.708486Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# FetchConfigRequest, traceId# 01kekyx7ewbjjqfjq5dtgj0vdj, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57210, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-10T12:43:11.709650Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-10T12:43:11.733304Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01kekyx7fg8mx3m4p9v97ax3m6, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57210, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef |98.9%| [TM] {RESULT} ydb/services/config/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/services/config/ut/unittest >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] >> test_example.py::TestExample::test_example [GOOD] |98.9%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> CoordinatorTests::Route >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> CoordinatorTests::WaitNodesConnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/quota/ut/unittest >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] Test command err: processed_blobs=41800 quoted_time=9.980000s 2026-01-10T12:43:05.223951Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593712568696165421:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:05.224007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:43:05.327742Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:43:05.884289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:43:05.963692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:05.963777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:06.020695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:06.111659Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:06.112201Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7593712568696165385:2081] 1768048985207065 != 1768048985207068 2026-01-10T12:43:06.136562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:43:06.233975Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:06.657220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/00135b/r3tmp/yandex0xHrRl.tmp 2026-01-10T12:43:06.657319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/00135b/r3tmp/yandex0xHrRl.tmp 2026-01-10T12:43:06.664380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/00135b/r3tmp/yandex0xHrRl.tmp 2026-01-10T12:43:06.664499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:43:07.148493Z INFO: TTestServer started on Port 24445 GrpcPort 2379 PQClient connected to localhost:2379 2026-01-10T12:43:07.511785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:07.545687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-10T12:43:07.645514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:09.687589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593712585876035614:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:09.687636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593712585876035627:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:09.687767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:09.688735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7593712585876035629:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:09.688808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:09.695609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:43:09.733056Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7593712585876035631:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-10T12:43:09.834217Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7593712585876035695:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:43:10.462153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593712568696165421:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:10.462210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:43:10.521834Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7593712585876035703:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-10T12:43:10.543970Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjQ1M2UzNjgtN2NkNDQ1NDktNmQ1MGVmMjctNjZiMTNjYmI=, ActorId: [1:7593712585876035612:2331], ActorState: ExecuteState, LegacyTraceId: 01kekyx5ezb4j3ga6gt8xbkhpt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-10T12:43:10.553483Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-10T12:43:10.652884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:10.713952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:10.870864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7593712594465970601:2835] === CheckClustersList. 
Ok 2026-01-10T12:43:16.856248Z :TEST_CASE_NAME INFO: TTopicSdkTestSetup started |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |98.9%| [TM] {RESULT} ydb/core/persqueue/pqtablet/quota/ut/unittest |98.9%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/quota/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/query_cache/py3test >> CoordinatorTests::WaitNodesConnected [GOOD] >> CoordinatorTests::ProcessMappingWithNodeIds >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] >> CoordinatorTests::ProcessMappingWithNodeIds [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> CoordinatorTests::RebalanceAfterNewNodeConnected >> CoordinatorTests::RebalanceAfterNewNodeConnected [GOOD] >> CoordinatorTests::RebalanceAfterNodeDisconnected >> CoordinatorTests::RebalanceAfterNodeDisconnected [GOOD] >> LeaderElectionTests::Test1 >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.9%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |98.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> BasicExample::BasicExample |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> GraphShard::NormalizeAndDownsample1 [GOOD] >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] >> 
tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> GraphShard::CreateGraphShard [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-10T12:43:27.016929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-10T12:43:27.017049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:43:27.017094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-10T12:43:27.017131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-10T12:43:27.017191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-10T12:43:27.017223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-10T12:43:27.017279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-10T12:43:27.017352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-10T12:43:27.018165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-10T12:43:27.020444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-10T12:43:27.160561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-10T12:43:27.160652Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:27.199254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-10T12:43:27.199362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-10T12:43:27.204251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-10T12:43:27.231508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-10T12:43:27.233291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-10T12:43:27.244187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-10T12:43:27.252719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-10T12:43:27.276056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:43:27.282264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-10T12:43:27.294120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:43:27.294223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:43:27.294291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-10T12:43:27.294336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-10T12:43:27.294451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-10T12:43:27.295468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-10T12:43:27.770246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.773990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.774088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.774174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.774270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-10T12:43:27.774350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 7273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2026-01-10T12:43:29.537420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-10T12:43:29.537545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:43:29.537715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-10T12:43:29.537919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720658 2026-01-10T12:43:29.540451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-10T12:43:29.540616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-10T12:43:29.540672Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:715: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2026-01-10T12:43:29.540723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2026-01-10T12:43:29.541049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 128 -> 240 2026-01-10T12:43:29.541112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2026-01-10T12:43:29.541250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-10T12:43:29.541375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[1:821:2784], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-10T12:43:29.543829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-10T12:43:29.543877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-10T12:43:29.544054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-10T12:43:29.544132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-01-10T12:43:29.544636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-10T12:43:29.544703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2026-01-10T12:43:29.544750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 240 -> 240 2026-01-10T12:43:29.545365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-10T12:43:29.545458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 
72057594046678944, cookie: 102 2026-01-10T12:43:29.545493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-10T12:43:29.545538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-10T12:43:29.545599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-10T12:43:29.545682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2026-01-10T12:43:29.559808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-10T12:43:29.559874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:1 ProgressState 2026-01-10T12:43:29.559965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-10T12:43:29.559995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-10T12:43:29.560052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-10T12:43:29.560093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-10T12:43:29.560123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2026-01-10T12:43:29.560157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-10T12:43:29.560219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-10T12:43:29.560257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-10T12:43:29.560421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-10T12:43:29.560483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-10T12:43:29.560511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-10T12:43:29.560581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-10T12:43:29.561082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-10T12:43:29.569082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-10T12:43:29.569176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-10T12:43:29.569592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-10T12:43:29.569714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-10T12:43:29.569753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1020:2952] TestWaitNotification: OK eventTxId 102 2026-01-10T12:43:29.575618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-10T12:43:29.575922Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/db1" took 342us result status StatusSuccess 2026-01-10T12:43:29.577666Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.9%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/graph/shard/ut/unittest >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> 
test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_commit.py::TestCommit::test_commit >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict >> BasicExample::BasicExample [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test_crud.py::TestYdbCrudOperations::test_crud_operations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:43:00.723951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:00.898098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:00.922222Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:00.922983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:00.923110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:01.568358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:01.568512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:01.575312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048977451612 != 1768048977451615 2026-01-10T12:43:01.592408Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:01.643996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:01.768433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:02.131308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:02.146263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:02.254599Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:43:02.254702Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:43:02.255750Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:43:02.541787Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:43:02.541914Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:43:02.544510Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:43:02.544675Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:43:02.545204Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:43:02.545355Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:43:02.545582Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:43:02.555520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:02.557746Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:43:02.558750Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:43:02.558831Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:43:02.655219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:43:02.656371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:43:02.657736Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:43:02.658021Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:02.737999Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:43:02.738930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:02.739089Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:02.744624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:43:02.744742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:43:02.744822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:43:02.748585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:02.748818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:02.748959Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:43:02.760069Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:02.813202Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:43:02.817700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:02.817959Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:43:02.818004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:43:02.818052Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:43:02.818101Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:02.818406Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:02.818455Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:02.821259Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:43:02.821418Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:43:02.821575Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:43:02.821630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:43:02.821747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:43:02.821787Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:43:02.821828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:43:02.821859Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:43:02.821909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:43:02.822051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:02.822098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:02.822140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:43:02.822257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:43:02.822299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:43:02.822499Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:43:02.822772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:43:02.822840Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:43:02.823937Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... Z node 5 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:43:31.442460Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:43:31.442488Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:43:31.442529Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:43:31.442568Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:43:31.442640Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:43:31.442675Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:43:31.442710Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:43:31.442743Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:43:31.442784Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-10T12:43:31.442807Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:43:31.442834Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-10T12:43:31.457401Z node 5 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:43:31.457511Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:43:31.457594Z node 5 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:43:31.457710Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:31.459975Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [5:68:2115] Handle TEvNavigate describe path /Root/table-1 2026-01-10T12:43:31.461002Z node 5 :TX_PROXY DEBUG: describe.cpp:270: Actor# [5:1057:2876] HANDLE EvNavigateScheme /Root/table-1 
2026-01-10T12:43:31.461616Z node 5 :TX_PROXY DEBUG: describe.cpp:354: Actor# [5:1057:2876] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-10T12:43:31.461787Z node 5 :TX_PROXY DEBUG: describe.cpp:433: Actor# [5:1057:2876] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2026-01-10T12:43:31.462985Z node 5 :TX_PROXY DEBUG: describe.cpp:446: Actor# [5:1057:2876] Handle TEvDescribeSchemeResult Forward to# [5:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-10T12:43:31.463904Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1061:2880], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.463963Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.464007Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1060:2879], serverId# [5:1061:2880], sessionId# [0:0:0] 2026-01-10T12:43:31.464354Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [5:1059:2878], Recipient [5:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-10T12:43:31.465241Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1064:2883], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.465402Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.465440Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1063:2882], serverId# [5:1064:2883], sessionId# [0:0:0] 2026-01-10T12:43:31.465541Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1062:2881], Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-10T12:43:31.465648Z 
node 5 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [5:1062:2881], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2026-01-10T12:43:31.468177Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.532835Z 2026-01-10T12:43:31.468274Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-10T12:43:31.468336Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:1062:2881]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-10T12:43:31.469063Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [5:877:2759], Recipient [5:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-10T12:43:31.469589Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1071:2889], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.469673Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:31.469738Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1070:2888], serverId# [5:1071:2889], sessionId# [0:0:0] 2026-01-10T12:43:31.469943Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1069:2887], Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-10T12:43:31.470085Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:118: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 38], requested from# [5:1069:2887] is not needed |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_background_compaction/unittest >> test_commit.py::TestCommit::test_commit [GOOD] >> QueryActorTest::SimpleQuery >> test_timeout.py::TestTimeout::test_timeout >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [FAIL] >> test_http_api.py::TestHttpApi::test_empty_query >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> test_example.py::TestExample::test_example2 >> test_restarts.py::test_basic [FAIL] >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> DataShardReassign::AutoReassignOnYellowFlag >> DataShardCompaction::CompactBorrowed ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] 2026-01-10 12:43:38,001 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:43:38,163 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 603913 716M 704M 636M ydb-tests-datashard-async_replication --basetemp /home/runner/.ya/build/build_root/d9xi/003447/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --d Test command err: Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/d9xi/003447/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003447', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003447/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003447/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/datashard/async_replication', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '20', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/datashard/async_replication', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/d9xi/003447/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003447', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003447/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003447/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/datashard/async_replication', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '20', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/datashard/async_replication', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', 
'--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2026-01-10 12:43:38,745 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2026-01-10 12:43:38,745 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |99.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> test_http_api.py::TestHttpApi::test_warning [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> Login::TokenIsNotExpired >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] >> Login::TokenIsNotExpired [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |98.9%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback |99.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite >> test.py::test_local [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> ReadUpdateWrite::Load [GOOD] |99.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] |99.0%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test |99.0%| [TS] {BAZEL_UPLOAD} ydb/tests/tools/pq_read/test/py3test >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration >> test_example.py::TestExample::test_example2 [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/auth/gtest >> Login::TokenIsNotExpired [GOOD] |99.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/auth/gtest |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/auth/gtest >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_tests/py3test >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |99.0%| [TM] {RESULT} ydb/tests/functional/serializable/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serializable/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:43:41.644176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:41.906504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:41.932117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:41.932800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:41.932917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:42.838862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:42.839016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:42.854335Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768049017540552 != 1768049017540555 2026-01-10T12:43:42.894298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:42.961734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:43.092191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:43.469784Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2026-01-10T12:43:43.469868Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.472746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:43.473034Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{189, redo 997b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2026-01-10T12:43:43.473104Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.474137Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:1:24576:527:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.474274Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.474454Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} commited cookie 1 for step 190 2026-01-10T12:43:43.474836Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-10T12:43:43.474899Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.476357Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{190, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-10T12:43:43.476447Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.476742Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.476790Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.476867Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} commited cookie 1 for step 191 2026-01-10T12:43:43.476987Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-10T12:43:43.477028Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.477178Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{191, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-10T12:43:43.477206Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.477405Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.477451Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.477504Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} commited cookie 1 for step 192 2026-01-10T12:43:43.477585Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-10T12:43:43.477613Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.477715Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{192, redo 121b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-10T12:43:43.477741Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.477886Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:1:24576:97:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.477922Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.477961Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} commited cookie 1 for step 193 2026-01-10T12:43:43.478470Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2026-01-10T12:43:43.478523Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.478615Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{193, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:43:43.478706Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.490428Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:43.604422Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:43:43.604518Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.604849Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{8, redo 264b alter 0b annex 0, ~{ 0, 4, 2 } -{ }, 0 gb} 2026-01-10T12:43:43.604933Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.605400Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:1:24576:165:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.605459Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.605578Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:10} commited cookie 1 for step 9 2026-01-10T12:43:43.606106Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep 2026-01-10T12:43:43.606186Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:43.606805Z 
node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} hope 1 -> done Change{193, redo 819b alter 0b annex 0, ~{ 1, 18, 33, 27, 35, 42, 48, 4 } -{ }, 0 gb} 2026-01-10T12:43:43.606884Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:43.607232Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:194:1:24576:427:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:43.607348Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:1 ... 31750Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:48.832252Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:48.832341Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:48.832470Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2026-01-10T12:43:48.988558Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:43:48.988675Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:48.988853Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{26, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-10T12:43:48.988918Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:48.989395Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:48.989483Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:48.989598Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2026-01-10T12:43:49.139544Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:43:49.139643Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.139811Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{27, redo 134b alter 0b annex 
0, ~{ 2 } -{ }, 0 gb} 2026-01-10T12:43:49.139891Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.140367Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.140472Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.140567Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2026-01-10T12:43:49.153112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:43:49.153177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:49.182171Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2026-01-10T12:43:49.182276Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.182402Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:43:49.182462Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.324715Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:43:49.324801Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.324950Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{28, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-10T12:43:49.325027Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.325516Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.325606Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.325711Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2026-01-10T12:43:49.436388Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 
2026-01-10T12:43:49.436490Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-10T12:43:49.436630Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2026-01-10T12:43:49.436708Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.436802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 16000 last cleanup 0 2026-01-10T12:43:49.436876Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:43:49.436932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-10T12:43:49.436982Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:43:49.437023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:43:49.437122Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:43:49.437182Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.437367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-10T12:43:49.500591Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-10T12:43:49.500693Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.500831Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{29, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-10T12:43:49.500876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.501282Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.501344Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.501437Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2026-01-10T12:43:49.528603Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 
2026-01-10T12:43:49.528675Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.528982Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{13, redo 193b alter 0b annex 0, ~{ 1, 16, 4 } -{ }, 0 gb} 2026-01-10T12:43:49.529039Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.529247Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-10T12:43:49.529326Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:43:49.529485Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{14, redo 329b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 2026-01-10T12:43:49.529569Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:43:49.542619Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037968897:2:11:0:0:265:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-10T12:43:49.542740Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} commited cookie 1 for step 11 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2026-01-10T12:43:49.663989Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow current light yellow move channels: [ 0 1 ] 2026-01-10T12:43:49.664059Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow reassign channels: [ 0 1 ] tablet# 72075186224037888 hive# 72057594037968897 --- Captured TEvReassignTablet event |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_reassign/unittest >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write Was written: 1 MiB, Speed: 1844674407370955160 MiB/s Write: 10% 0.411289s 30% 0.411289s 50% 0.411289s 90% 0.411289s 99% 0.411289s Write: 10% 0.547946s 30% 0.547946s 50% 0.547946s 90% 0.547946s 99% 0.547946s Write: 10% 0.363985s 30% 0.363985s 50% 0.363985s 90% 0.363985s 99% 0.363985s Write: 10% 0.652500s 30% 0.652500s 50% 0.652500s 90% 0.652500s 99% 0.652500s Write: 10% 0.332962s 30% 0.332962s 50% 0.332962s 90% 0.332962s 99% 0.332962s Write: 10% 0.818800s 30% 0.818800s 50% 0.818800s 90% 0.818800s 99% 0.818800s Write: 10% 0.590057s 30% 0.590057s 50% Write: 10% 0.422552s 30% 0.422552s 50% 0.590057s 90% 0.590057s 99% 0.590057s0.422552s 90% 0.422552s 99% 0.422552s Write: 10% 0.825756s 30% 0.825756s 50% 0.825756s 90% 0.825756s 99% 0.825756s Write: 10% 0.615885s 30% 0.615885s 50% 0.615885s 90% 0.615885s 99% 0.615885s Write: 10% 0.560951s 30% 0.560951s 50% 0.560951s 90% 0.560951s 99% 0.560951s Write: 10% 0.614043s 30% 0.614043s 50% 0.614043s 90% 0.614043s 99% 0.614043s Write: 10% 0.862710s 30% 0.862710s 50% 0.862710s 90% 0.862710s 99% 0.862710s Write: 10% 0.519539s 30% 0.519539s 50% 0.519539s 90% 0.519539s 99% 0.519539s Write: 10% 0.875114s 30% 0.875114s 50% 0.875114s 90% 0.875114s 99% 0.875114s Write: 10% 0.625919s 30% 0.625919s 50% Write: 10% 0.811753s 30% 0.811753s 50% 0.811753s 90% 0.811753s 99% 0.811753s0.625919s 90% 0.625919s 99% 0.625919s Write: 10% 0.670165s 30% 0.670165s 50% 0.670165s 90% 0.670165s 99% 0.670165s Write: 10% 0.476365s 30% 0.476365s 50% 0.476365s 90% 0.476365s 99% 0.476365s Write: 10% 0.639890s 30% 0.639890s 50% 0.639890s 90% 0.639890s 99% 0.639890s Write: 10% 1.015348s 30% 1.015348s 50% 1.015348s 90% 1.015348s 99% 1.015348s Write: 10% 1.113272s 30% 1.113272s 50% 1.113272s 90% 1.113272s 99% 1.113272s Write: 10% 1.170836s 30% 1.170836s 50% 1.170836s 90% 1.170836s 99% 1.170836s Write: 10% 0.930323s 30% 0.930323s 50% 0.930323s 90% 0.930323s 99% 0.930323s Write: 10% 0.951382s 30% 0.951382s 50% 0.951382s 90% 0.951382s 99% 0.951382s Write: 10% 1.205261s 30% 1.205261s 50% 1.205261s 90% 1.205261s 99% 1.205261s Write: 10% 0.811524s 30% 0.811524s 50% 0.811524s 90% 0.811524s 99% 0.811524s Write: 10% 1.114643s 30% 1.114643s 50% 1.114643s 90% 1.114643s 99% 1.114643s Write: 10% 0.833246s 30% 0.833246s 50% 0.833246s 90% 0.833246s 99% 0.833246s Write: 10% 0.826300s 30% 0.826300s 50% 0.826300s 90% 0.826300s 99% 0.826300s Write: 10% 0.979939s 30% 0.979939s 50% 0.979939s 90% 0.979939s 99% 0.979939s Write: 10% 0.864671s 30% 0.864671s 50% 0.864671s 90% 0.864671s 99% 0.864671s Write: 10% 1.253693s 30% 1.253693s 50% 1.253693s 90% 1.253693s 99% 1.253693s Write: 10% 1.500489s 30% 1.500489s 50% 1.500489s 90% 1.500489s 99% 1.500489s Write: 10% 0.883664s 30% 0.883664s 50% 0.883664s 90% 0.883664s 99% 0.883664s Write: 10% 0.695335s 30% 0.695335s 50% 0.695335s 90% 0.695335s 99% 0.695335s Write: 10% 1.216349s 30% 1.216349s 50% 1.216349s 90% 1.216349s 99% 1.216349s Write: 10% 0.652603s 30% 0.652603s 50% 0.652603s 90% 0.652603s 99% 0.652603s Write: 10% 0.681672s 30% 0.681672s 50% 0.681672s 90% 0.681672s 99% 0.681672s Write: 10% 1.048464s 30% 1.048464s 50% 1.048464s 90% 1.048464s 99% 1.048464s Write: 10% 0.676851s 30% 0.676851s 50% 0.676851s 90% 0.676851s 99% 0.676851s Write: 10% 0.679469s 30% 0.679469s 50% 0.679469s 90% 0.679469s 99% 0.679469s Write: 10% 0.678050s 30% 0.678050s 50% 0.678050s 90% 0.678050s 99% 0.678050s Write: 10% 0.691886s 30% 0.691886s 50% 0.691886s 90% 0.691886s 99% 0.691886s Write: 10% 0.701423s 30% 0.701423s 50% 0.701423s 90% 0.701423s 99% 0.701423s Write: 10% 0.717779s 30% 
0.717779s 50% 0.717779s 90% 0.717779s 99% 0.717779s Write: 10% 0.715893s 30% 0.715893s 50% 0.715893s 90% 0.715893s 99% 0.715893s Write: 10% 0.779100s 30% 0.779100s 50% 0.779100s 90% 0.779100s 99% 0.779100s Write: 10% 0.657108s 30% 0.657108s 50% 0.657108s 90% 0.657108s 99% 0.657108s Write: 10% 0.974411s 30% 0.974411s 50% 0.974411s 90% 0.974411s 99% 0.974411s Write: 10% 0.740490s 30% 0.740490s 50% 0.740490s 90% 0.740490s 99% 0.740490s Write: 10% 0.687862s 30% 0.687862s 50% 0.687862s 90% 0.687862s 99% 0.687862s Write: 10% 0.668327s 30% 0.668327s 50% 0.668327s 90% 0.668327s 99% 0.668327s Write: 10% 0.709138s 30% 0.709138s 50% 0.709138s 90% 0.709138s 99% 0.709138s Write: 10% 0.641165s 30% 0.641165s 50% 0.641165s 90% 0.641165s 99% 0.641165s Write: 10% 0.744416s 30% 0.744416s 50% 0.744416s 90% 0.744416s 99% 0.744416s Write: 10% 0.782912s 30% 0.782912s 50% 0.782912s 90% 0.782912s 99% 0.782912s Write: 10% 0.711203s 30% 0.711203s 50% 0.711203s 90% 0.711203s 99% 0.711203s Write: 10% 0.758002s 30% 0.758002s 50% 0.758002s 90% 0.758002s 99% 0.758002s Write: 10% 0.712345s 30% 0.712345s 50% 0.712345s 90% 0.712345s 99% 0.712345s Write: 10% 0.762254s 30% 0.762254s 50% 0.762254s 90% 0.762254s 99% 0.762254s Write: 10% 0.710075s 30% 0.710075s 50% 0.710075s 90% 0.710075s 99% 0.710075s Write: 10% 0.739456s 30% 0.739456s 50% 0.739456s 90% 0.739456s 99% 0.739456s Write: 10% 0.688199s 30% 0.688199s 50% 0.688199s 90% 0.688199s 99% 0.688199s Step 2. read write Write: 10% 0.458174s 30% 0.458174s 50% 0.458174s 90% 0.458174s 99% 0.458174s Write: 10% 0.382452s 30% 0.382452s 50% 0.382452s 90% 0.382452s 99% 0.382452s Write: 10% 0.440323s 30% 0.440323s 50% 0.440323s 90% 0.440323s 99% 0.440323s Write: 10% 0.390450s 30% 0.390450s 50% 0.390450s 90% 0.390450s 99% 0.390450s Write: 10% 0.952498s 30% 0.952498s 50% Write: 10% 0.902858s 30% 0.902858s 50% 0.902858s 90% 0.902858s 99% 0.902858s 0.952498s 90% 0.952498s 99% 0.952498s Write: 10% 0.957419s 30% 0.957419s 50% 0.957419s 90% 0.957419s 99% 0.957419s Write: 10% 0.611408s 30% 0.611408s 50% 0.611408s 90% 0.611408s 99% 0.611408s Write: 10% 0.895581s 30% 0.895581s 50% 0.895581s 90% 0.895581s 99% 0.895581s Write: 10% 0.607992s 30% 0.607992s 50% 0.607992s 90% 0.607992s 99% 0.607992s Write: 10% 0.942301s 30% 0.942301s 50% 0.942301s 90% 0.942301s 99% 0.942301s Write: 10% 0.982840s 30% 0.982840s 50% 0.982840s 90% 0.982840s 99% 0.982840s Write: 10% 0.873907s 30% 0.873907s 50% 0.873907s 90% 0.873907s 99% 0.873907s Write: 10% 0.834786s 30% 0.834786s 50% Write: 10% 0.888940s 30% 0.888940s 50% 0.888940s 90% 0.888940s 99% 0.888940s 0.834786s 90% 0.834786s 99% 0.834786sWrite: 10% 0.897403s 30% 0.897403s 50% 0.897403s 90% 0.897403s 99% 0.897403s Write: 10% 0.869182s 30% 0.869182s 50% 0.869182s 90% 0.869182s 99% 0.869182s Write: 10% 0.916099s 30% 0.916099s 50% 0.916099s 90% 0.916099s 99% 0.916099s Write: 10% 0.908574s 30% 0.908574s 50% 0.908574s 90% 0.908574s 99% 0.908574s Write: 10% 0.511963s 30% 0.511963s 50% 0.511963s 90% 0.511963s 99% 0.511963s Write: 10% 0.904378s 30% 0.904378s 50% 0.904378s 90% 0.904378s 99% 0.904378s Write: 10% 1.039473s 30% 1.039473s 50% 1.039473s 90% 1.039473s 99% 1.039473s Write: 10% 1.026206s 30% 1.026206s 50% 1.026206s 90% 1.026206s 99% 1.026206s Write: 10% 0.939970s 30% 0.939970s 50% 0.939970s 90% 0.939970s 99% 0.939970s Write: 10% 0.741135s 30% 0.741135s 50% 0.741135s 90% 0.741135s 99% 0.741135s Write: 10% 0.941366s 30% 0.941366s 50% 0.941366s 90% 0.941366s 99% 0.941366s Write: 10% 1.058027s 30% 1.058027s 50% 1.058027s 90% 1.058027s 99% 1.058027s Write: 10% 
1.198495s 30% 1.198495s 50% 1.198495s 90% 1.198495s 99% 1.198495s Write: 10% 1.358390s 30% 1.358390s 50% 1.358390s 90% 1.358390s 99% 1.358390s Write: 10% 1.179911s 30% 1.179911s 50% 1.179911s 90% 1.179911s 99% 1.179911s Write: 10% 1.544537s 30% 1.544537s 50% 1.544537s 90% 1.544537s 99% 1.544537s Write: 10% 0.943731s 30% 0.943731s 50% 0.943731s 90% 0.943731s 99% 0.943731s Write: 10% 0.993810s 30% 0.993810s 50% 0.993810s 90% 0.993810s 99% 0.993810s Write: 10% 1.581242s 30% 1.581242s 50% 1.581242s 90% 1.581242s 99% 1.581242s Write: 10% 0.911201s 30% 0.911201s 50% 0.911201s 90% 0.911201s 99% 0.911201s Write: 10% 1.447346s 30% 1.447346s 50% 1.447346s 90% 1.447346s 99% 1.447346s Write: 10% 0.979601s 30% 0.979601s 50% 0.979601s 90% 0.979601s 99% 0.979601s Write: 10% 0.946284s 30% 0.946284s 50% 0.946284s 90% 0.946284s 99% 0.946284s Write: 10% 0.906522s 30% 0.906522s 50% 0.906522s 90% 0.906522s 99% 0.906522s Write: 10% 1.009569s 30% 1.009569s 50% 1.009569s 90% 1.009569s 99% 1.009569s Write: 10% 0.777582s 30% 0.777582s 50% 0.777582s 90% 0.777582s 99% 0.777582s Write: 10% 1.071020s 30% 1.071020s 50% 1.071020s 90% 1.071020s 99% 1.071020s Write: 10% 1.649401s 30% 1.649401s 50% 1.649401s 90% 1.649401s 99% 1.649401s Write: 10% 0.918491s 30% 0.918491s 50% 0.918491s 90% 0.918491s 99% 0.918491s Write: 10% 1.160949s 30% 1.160949s 50% 1.160949s 90% 1.160949s 99% 1.160949s Write: 10% 1.080614s 30% 1.080614s 50% 1.080614s 90% 1.080614s 99% 1.080614s Write: 10% 1.033996s 30% 1.033996s 50% 1.033996s 90% 1.033996s 99% 1.033996s Write: 10% 1.208904s 30% 1.208904s 50% 1.208904s 90% 1.208904s 99% 1.208904s Write: 10% 1.705700s 30% 1.705700s 50% 1.705700s 90% 1.705700s 99% 1.705700s Write: 10% 1.043630s 30% 1.043630s 50% 1.043630s 90% 1.043630s 99% 1.043630s Write: 10% 0.929471s 30% 0.929471s 50% 0.929471s 90% 0.929471s 99% 0.929471s Write: 10% 0.963404s 30% 0.963404s 50% 0.963404s 90% 0.963404s 99% 0.963404s Write: 10% 1.036179s 30% 1.036179s 50% 1.036179s 90% 1.036179s 99% 1.036179s Write: 10% 0.985089s 30% 0.985089s 50% 0.985089s 90% 0.985089s 99% 0.985089s Write: 10% 0.995140s 30% 0.995140s 50% 0.995140s 90% 0.995140s 99% 0.995140s Write: 10% 0.999287s 30% 0.999287s 50% 0.999287s 90% 0.999287s 99% 0.999287s Write: 10% 0.987344s 30% 0.987344s 50% 0.987344s 90% 0.987344s 99% 0.987344s Write: 10% 0.948156s 30% 0.948156s 50% 0.948156s 90% 0.948156s 99% 0.948156s Write: 10% 1.184987s 30% 1.184987s 50% 1.184987s 90% 1.184987s 99% 1.184987s Write: 10% 1.033031s 30% 1.033031s 50% 1.033031s 90% 1.033031s 99% 1.033031s Write: 10% 1.045245s 30% 1.045245s 50% 1.045245s 90% 1.045245s 99% 1.045245s Write: 10% 1.006287s 30% 1.006287s 50% 1.006287s 90% 1.006287s 99% 1.006287s Write: 10% 1.115688s 30% 1.115688s 50% 1.115688s 90% 1.115688s 99% 1.115688s Write: 10% 1.094133s 30% 1.094133s 50% 1.094133s 90% 1.094133s 99% 1.094133s Read: 10% 2.601993s 30% 2.601993s 50% 2.601993s 90% 2.601993s 99% 2.601993s Step 3. 
write modify Write: 10% 0.281495s 30% 0.281495s 50% 0.281495s 90% 0.281495s 99% 0.281495s Write: 10% 0.231793s 30% 0.231793s 50% 0.231793s 90% 0.231793s 99% 0.231793s Write: 10% 0.596918s 30% 0.596918s 50% 0.596918s 90% 0.596918s 99% 0.596918s Write: 10% 0.465720s 30% 0.465720s 50% 0.465720s 90% 0.465720s 99% 0.465720s Write: 10% 0.641389s 30% 0.641389s 50% 0.641389s 90% 0.641389s 99% 0.641389s Write: 10% 0.538645s 30% 0.538645s 50% 0.538645s 90% 0.538645s 99% 0.538645s Write: 10% 0.597399s 30% 0.597399s 50% 0.597399s 90% 0.597399s 99% 0.597399s Write: 10% 0.705733s 30% 0.705733s 50% 0.705733s 90% 0.705733s 99% 0.705733s Write: 10% 0.971906s 30% 0.971906s 50% 0.971906s 90% 0.971906s 99% 0.971906s Write: 10% 0.738353s 30% 0.738353s 50% 0.738353s 90% 0.738353s 99% 0.738353s Write: 10% 0.917804s 30% 0.917804s 50% 0.917804s 90% 0.917804s 99% 0.917804s Write: 10% 1.067664s 30% 1.067664s 50% 1.067664s 90% 1.067664s 99% 1.067664s Write: 10% 1.116213s 30% 1.116213s 50% 1.116213s 90% 1.116213s 99% 1.116213s Write: 10% 0.636805s 30% 0.636805s 50% 0.636805s 90% 0.636805s 99% 0.636805s Write: 10% 1.107114s 30% 1.107114s 50% 1.107114s 90% 1.107114s 99% 1.107114s Write: 10% 1.139125s 30% 1.139125s 50% 1.139125s 90% 1.139125s 99% 1.139125s Write: 10% 1.062582s 30% 1.062582s 50% 1.062582s 90% 1.062582s 99% 1.062582s Write: 10% 0.998640s 30% 0.998640s 50% 0.998640s 90% 0.998640s 99% 0.998640s Write: 10% 0.661092s 30% 0.661092s 50% 0.661092s 90% 0.661092s 99% 0.661092s Write: 10% 1.173289s 30% 1.173289s 50% 1.173289s 90% 1.173289s 99% 1.173289s Write: 10% 0.672606s 30% 0.672606s 50% 0.672606s 90% 0.672606s 99% 0.672606s Write: 10% 0.842965s 30% 0.842965s 50% 0.842965s 90% 0.842965s 99% 0.842965s Write: 10% 0.961854s 30% 0.961854s 50% 0.961854s 90% 0.961854s 99% 0.961854s Write: 10% 0.963290s 30% 0.963290s 50% 0.963290s 90% 0.963290s 99% 0.963290s Write: 10% 1.284748s 30% 1.284748s 50% 1.284748s 90% 1.284748s 99% 1.284748s Write: 10% 0.701066s 30% 0.701066s 50% 0.701066s 90% 0.701066s 99% 0.701066s Write: 10% 0.917641s 30% 0.917641s 50% 0.917641s 90% 0.917641s 99% 0.917641s Write: 10% 1.024834s 30% 1.024834s 50% 1.024834s 90% 1.024834s 99% 1.024834s Write: 10% 1.266354s 30% 1.266354s 50% 1.266354s 90% 1.266354s 99% 1.266354s Write: 10% 0.773953s 30% 0.773953s 50% 0.773953s 90% 0.773953s 99% 0.773953s Write: 10% 0.903181s 30% 0.903181s 50% 0.903181s 90% 0.903181s 99% 0.903181s Write: 10% 1.054613s 30% 1.054613s 50% 1.054613s 90% 1.054613s 99% 1.054613s Write: 10% 0.780294s 30% 0.780294s 50% 0.780294s 90% 0.780294s 99% 0.780294s Write: 10% 1.241174s 30% 1.241174s 50% 1.241174s 90% 1.241174s 99% 1.241174s Write: 10% 0.969992s 30% 0.969992s 50% 0.969992s 90% 0.969992s 99% 0.969992s Write: 10% 0.848130s 30% 0.848130s 50% 0.848130s 90% 0.848130s 99% 0.848130s Write: 10% 0.883165s 30% 0.883165s 50% 0.883165s 90% 0.883165s 99% 0.883165s Write: 10% 0.724271s 30% 0.724271s 50% 0.724271s 90% 0.724271s 99% 0.724271s Write: 10% 1.452049s 30% 1.452049s 50% 1.452049s 90% 1.452049s 99% 1.452049s Write: 10% 0.791330s 30% 0.791330s 50% 0.791330s 90% 0.791330s 99% 0.791330s Write: 10% 0.779683s 30% 0.779683s 50% 0.779683s 90% 0.779683s 99% 0.779683s Write: 10% 0.746035s 30% 0.746035s 50% 0.746035s 90% 0.746035s 99% 0.746035s Write: 10% 0.754039s 30% 0.754039s 50% 0.754039s 90% 0.754039s 99% 0.754039s Write: 10% 0.748785s 30% 0.748785s 50% 0.748785s 90% 0.748785s 99% 0.748785s Write: 10% 0.745035s 30% 0.745035s 50% 0.745035s 90% 0.745035s 99% 0.745035s Write: 10% 0.775412s 30% 0.775412s 50% 0.775412s 90% 0.775412s 99% 0.775412s
Write: 10% 0.727936s 30% 0.727936s 50% 0.727936s 90% 0.727936s 99% 0.727936s Write: 10% 0.804604s 30% 0.804604s 50% 0.804604s 90% 0.804604s 99% 0.804604s Write: 10% 0.727583s 30% 0.727583s 50% 0.727583s 90% 0.727583s 99% 0.727583s Write: 10% 0.748489s 30% 0.748489s 50% 0.748489s 90% 0.748489s 99% 0.748489s Write: 10% 0.825989s 30% 0.825989s 50% 0.825989s 90% 0.825989s 99% 0.825989s Write: 10% 0.776688s 30% 0.776688s 50% 0.776688s 90% 0.776688s 99% 0.776688s Write: 10% 0.799506s 30% 0.799506s 50% 0.799506s 90% 0.799506s 99% 0.799506s Write: 10% 0.870020s 30% 0.870020s 50% 0.870020s 90% 0.870020s 99% 0.870020s Write: 10% 0.780007s 30% 0.780007s 50% 0.780007s 90% 0.780007s 99% 0.780007s Write: 10% 0.940987s 30% 0.940987s 50% 0.940987s 90% 0.940987s 99% 0.940987s Write: 10% 0.831434s 30% 0.831434s 50% 0.831434s 90% 0.831434s 99% 0.831434s Write: 10% 0.908056s 30% 0.908056s 50% 0.908056s 90% 0.908056s 99% 0.908056s Write: 10% 1.000727s 30% 1.000727s 50% 1.000727s 90% 1.000727s 99% 1.000727s Write: 10% 0.873028s 30% 0.873028s 50% 0.873028s 90% 0.873028s 99% 0.873028s Write: 10% 0.856705s 30% 0.856705s 50% 0.856705s 90% 0.856705s 99% 0.856705s Write: 10% 0.857851s 30% 0.857851s 50% 0.857851s 90% 0.857851s 99% 0.857851s Write: 10% 0.966839s 30% 0.966839s 50% 0.966839s 90% 0.966839s 99% 0.966839s Write: 10% 0.935886s 30% 0.935886s 50% 0.935886s 90% 0.935886s 99% 0.935886s Update: 10% 0.916540s 30% 0.916540s 50% 0.916540s 90% 0.916540s 99% 0.916540s Step 4. read modify write Write: 10% 0.365082s 30% 0.365082s 50% 0.365082s 90% 0.365082s 99% 0.365082s Write: 10% 1.504295s 30% 1.504295s 50% 1.504295s 90% 1.504295s 99% 1.504295s Write: 10% 1.618021s 30% 1.618021s 50% 1.618021s 90% 1.618021s 99% 1.618021s Write: 10% 1.980199s 30% 1.980199s 50% 1.980199s 90% 1.980199s 99% 1.980199s Write: 10% 2.499002s 30% 2.499002s 50% 2.499002s 90% 2.499002s 99% 2.499002s Write: 10% 2.636695s 30% 2.636695s 50% 2.636695s 90% 2.636695s 99% 2.636695s Was written: 87 MiB, Speed: 8 MiB/s Write: 10% 3.537881s 30% 3.537881s 50% 3.537881s 90% 3.537881s 99% 3.537881s Write: 10% 4.238889s 30% 4.238889s 50% 4.238889s 90% 4.238889s 99% 4.238889s Write: 10% 4.652826s 30% 4.652826s 50% 4.652826s 90% 4.652826s 99% 4.652826s Write: 10% 5.944289s 30% 5.944289s 50% 5.944289s 90% 5.944289s 99% 5.944289s Write: 10% 8.059550s 30% 8.059550s 50% 8.059550s 90% 8.059550s 99% 8.059550s Write: 10% 8.075988s 30% 8.075988s 50% 8.075988s 90% 8.075988s 99% 8.075988s Write: 10% 8.538803s 30% 8.538803s 50% 8.538803s 90% 8.538803s 99% 8.538803s Write: 10% 8.559585s 30% 8.559585s 50% 8.559585s 90% 8.559585s 99% 8.559585s Write: 10% 9.029057s 30% 9.029057s 50% 9.029057s 90% 9.029057s 99% 9.029057s Write: 10% 9.087047s 30% 9.087047s 50% 9.087047s 90% 9.087047s 99% 9.087047s Write: 10% 9.423272s 30% 9.423272s 50% 9.423272s 90% 9.423272s 99% 9.423272s Write: 10% 9.627726s 30% 9.627726s 50% 9.627726s 90% 9.627726s 99% 9.627726s Write: 10% 9.861972s 30% 9.861972s 50% 9.861972s 90% 9.861972s 99% 9.861972s Write: 10% 10.277687s 30% 10.277687s 50% 10.277687s 90% 10.277687s 99% 10.277687s Write: 10% 10.316148s 30% 10.316148s 50% 10.316148s 90% 10.316148s 99% 10.316148s Write: 10% 10.370475s 30% 10.370475s 50% 10.370475s 90% 10.370475s 99% 10.370475s Write: 10% 10.485585s 30% 10.485585s 50% 10.485585s 90% 10.485585s 99% 10.485585s Write: 10% 10.530255s 30% 10.530255s 50% 10.530255s 90% 10.530255s 99% 10.530255s Write: 10% 10.613722s 30% 10.613722s 50% 10.613722s 90% 10.613722s 99% 10.613722s Write: 10% 10.640516s 30% 10.640516s 50% 10.640516s 90% 10.640516s 99% 
10.640516s Write: 10% 10.727247s 30% 10.727247s 50% 10.727247s 90% 10.727247s 99% 10.727247s Write: 10% 10.877354s 30% 10.877354s 50% 10.877354s 90% 10.877354s 99% 10.877354s Write: 10% 10.875212s 30% 10.875212s 50% 10.875212s 90% 10.875212s 99% 10.875212s Write: 10% 11.337993s 30% 11.337993s 50% 11.337993s 90% 11.337993s 99% 11.337993s Write: 10% 11.330994s 30% 11.330994s 50% 11.330994s 90% 11.330994s 99% 11.330994s Write: 10% 11.606640s 30% 11.606640s 50% 11.606640s 90% 11.606640s 99% 11.606640s Write: 10% 12.247888s 30% 12.247888s 50% 12.247888s 90% 12.247888s 99% 12.247888s Was written: 87 MiB, Speed: 0 MiB/s Write: 10% 12.341951s 30% 12.341951s 50% 12.341951s 90% 12.341951s 99% 12.341951s Write: 10% 12.539309s 30% 12.539309s 50% 12.539309s 90% 12.539309s 99% 12.539309s Write: 10% 12.576082s 30% 12.576082s 50% 12.576082s 90% 12.576082s 99% 12.576082s Write: 10% 12.621722s 30% 12.621722s 50% 12.621722s 90% 12.621722s 99% 12.621722s Write: 10% 12.580075s 30% 12.580075s 50% 12.580075s 90% 12.580075s 99% 12.580075s Write: 10% 12.628259s 30% 12.628259s 50% 12.628259s 90% 12.628259s 99% 12.628259s Write: 10% 12.628759s 30% 12.628759s 50% 12.628759s 90% 12.628759s 99% 12.628759s Write: 10% 12.641166s 30% 12.641166s 50% 12.641166s 90% 12.641166s 99% 12.641166s Write: 10% 12.532074s 30% 12.532074s 50% 12.532074s 90% 12.532074s 99% 12.532074s Write: 10% 12.685965s 30% 12.685965s 50% 12.685965s 90% 12.685965s 99% 12.685965s Write: 10% 12.740532s 30% 12.740532s 50% 12.740532s 90% 12.740532s 99% 12.740532s Write: 10% 12.832885s 30% 12.832885s 50% 12.832885s 90% 12.832885s 99% 12.832885s Write: 10% 12.889252s 30% 12.889252s 50% 12.889252s 90% 12.889252s 99% 12.889252s Write: 10% 12.830449s 30% 12.830449s 50% 12.830449s 90% 12.830449s 99% 12.830449s Write: 10% 14.572124s 30% 14.572124s 50% 14.572124s 90% 14.572124s 99% 14.572124s Write: 10% 14.628555s 30% 14.628555s 50% 14.628555s 90% 14.628555s 99% 14.628555s Write: 10% 14.614199s 30% 14.614199s 50% 14.614199s 90% 14.614199s 99% 14.614199s Write: 10% 14.722972s 30% 14.722972s 50% 14.722972s 90% 14.722972s 99% 14.722972s Write: 10% 14.670039s 30% 14.670039s 50% 14.670039s 90% 14.670039s 99% 14.670039s Write: 10% 14.813587s 30% 14.813587s 50% 14.813587s 90% 14.813587s 99% 14.813587s Write: 10% 14.878143s 30% 14.878143s 50% 14.878143s 90% 14.878143s 99% 14.878143s Write: 10% 14.800771s 30% 14.800771s 50% 14.800771s 90% 14.800771s 99% 14.800771s Write: 10% 14.794562s 30% 14.794562s 50% 14.794562s 90% 14.794562s 99% 14.794562s Write: 10% 14.860911s 30% 14.860911s 50% 14.860911s 90% 14.860911s 99% 14.860911s Write: 10% 14.898370s 30% 14.898370s 50% 14.898370s 90% 14.898370s 99% 14.898370s Write: 10% 14.935880s 30% 14.935880s 50% 14.935880s 90% 14.935880s 99% 14.935880s Write: 10% 14.973869s 30% 14.973869s 50% 14.973869s 90% 14.973869s 99% 14.973869s Write: 10% 14.989888s 30% 14.989888s 50% 14.989888s 90% 14.989888s 99% 14.989888s Write: 10% 15.119689s 30% 15.119689s 50% 15.119689s 90% 15.119689s 99% 15.119689s Write: 10% 15.230766s 30% 15.230766s 50% 15.230766s 90% 15.230766s 99% 15.230766s Write: 10% 15.110020s 30% 15.110020s 50% 15.110020s 90% 15.110020s 99% 15.110020s Update: 10% 4.710013s 30% 4.710013s 50% 10.809590s 90% 10.809590s 99% 10.809590s Read: 10% 1.484163s 30% 2.174529s 50% 2.509834s 90% 4.361327s 99% 4.361327s |99.0%| [TM] {RESULT} ydb/tests/olap/high_load/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/high_load/unittest >> Coordinator::RestoreDomainConfiguration [GOOD] >> 
Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> test_vdisks.py::TestTinyVDisks::test_enabled >> test_example.py::TestExample::test_linked_with_testcase >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic [FAIL] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.0%| [TA] $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit >> TTopicReaderTests::TestRun_ReadOneMessage >> test_drain.py::TestHive::test_drain_on_stop >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-10T12:42:59.008229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:42:59.158902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:42:59.174543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:42:59.174980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:42:59.175047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:42:59.835506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:42:59.835649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:42:59.843289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048975854769 != 1768048975854772 2026-01-10T12:42:59.866292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:42:59.923270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:00.060361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:00.507352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:00.526212Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:00.647069Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:43:00.647190Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:43:00.649855Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:43:00.820923Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:43:00.821059Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:43:00.823758Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:43:00.823893Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:43:00.824297Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:43:00.824452Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:43:00.824592Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:43:00.829022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:00.830915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:43:00.831988Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:43:00.832137Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:43:00.895519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:43:00.896835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:43:00.898671Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:43:00.899107Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:00.950843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:43:00.951913Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:00.952064Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:00.956414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:43:00.956540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:43:00.956612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:43:00.958759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:00.958999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:00.959138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:43:00.970340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:01.017660Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:43:01.020513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:01.020697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:43:01.020748Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:43:01.020793Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:43:01.020841Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:01.021052Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:01.023837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:01.025644Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:43:01.025782Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:43:01.025909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:43:01.025965Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:43:01.026039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:43:01.026084Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:43:01.026132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:43:01.026168Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:43:01.026247Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:43:01.026363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:01.026414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:01.026476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:43:01.026582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-10T12:43:01.026632Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:43:01.026759Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:43:01.027163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:43:01.027250Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:43:01.027378Z ... }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2026-01-10T12:44:08.974729Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [8:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:44:08.974838Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [8:68:2115] TxId# 281474976710665 ProcessProposeKqpTransaction 2026-01-10T12:44:08.976748Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [8:1304:3063], Recipient [8:1279:3045]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:44:08.976866Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:44:08.976956Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1302:3062], serverId# [8:1304:3063], sessionId# [0:0:0] 2026-01-10T12:44:08.979512Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [8:1307:3064], Recipient [8:1279:3045]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-10T12:44:08.979583Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-10T12:44:08.979734Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-10T12:44:08.979824Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:44:08.980033Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-10T12:44:08.980278Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:837: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2026-01-10T12:44:08.981159Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:854: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2026-01-10T12:44:08.981749Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:925: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2026-01-10T12:44:08.981886Z node 8 
:TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-10T12:44:08.981992Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-10T12:44:08.982122Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:44:08.982187Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-10T12:44:08.982246Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-10T12:44:08.982308Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-10T12:44:08.982356Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-10T12:44:08.982420Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:44:08.982450Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-10T12:44:08.982474Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-10T12:44:08.982507Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-10T12:44:08.982679Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:44:08.982991Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Restart 2026-01-10T12:44:08.983033Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2026-01-10T12:44:08.983145Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:44:08.983233Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} pin 0 (0 b) load 1 (65 b) 2026-01-10T12:44:08.983333Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2026-01-10T12:44:08.983441Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} request page collection [72075186224037888:1:23:1:12288:190:0] pages [ 0 ] 2026-01-10T12:44:08.983549Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, loading 1 pages, 65 bytes, newly pinned 0 pages, 0 bytes 2026-01-10T12:44:08.983765Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:23:1:12288:190:0] ok OK}, type 1 
2026-01-10T12:44:08.983876Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} activated 2026-01-10T12:44:08.984174Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-10T12:44:08.984231Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-10T12:44:08.984348Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-10T12:44:08.984900Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[8:1307:3064], 0} after executionsCount# 2 2026-01-10T12:44:08.984995Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[8:1307:3064], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2026-01-10T12:44:08.985114Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:44:08.985148Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-01-10T12:44:08.985198Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:44:08.985232Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:44:08.985281Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-10T12:44:08.985309Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-10T12:44:08.985347Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-10T12:44:08.985407Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-10T12:44:08.985523Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:44:08.985611Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2026-01-10T12:44:08.985687Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-10T12:44:08.985896Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553217, Sender [8:1279:3045], Recipient [8:1279:3045]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-10T12:44:08.985948Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3306: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2026-01-10T12:44:08.986060Z node 8 :TABLET_EXECUTOR DEBUG: 
Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2026-01-10T12:44:08.986143Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-10T12:44:08.986243Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037890 ReadContinue for iterator# {[8:1307:3064], 0}, firstUnprocessedQuery# 0 2026-01-10T12:44:08.986349Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037890 ReadContinue: iterator# {[8:1307:3064], 0}, FirstUnprocessedQuery# 0 2026-01-10T12:44:08.986505Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3338: 72075186224037890 readContinue iterator# {[8:1307:3064], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-10T12:44:08.986593Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3362: 72075186224037890 read iterator# {[8:1307:3064], 0} finished in ReadContinue 2026-01-10T12:44:08.986725Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-10T12:44:08.986821Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2026-01-10T12:44:08.988485Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [8:1307:3064], Recipient [8:1279:3045]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-10T12:44:08.988551Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-10T12:44:08.988642Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_followers/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> QueryActorTest::StreamQuery [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TDqPqRdReadActorTests::SessionError [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict >> TopicSessionTests::BadDataSessionError >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> 
TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2026-01-10T12:43:35.958981Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593712699461089800:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:35.960665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:43:36.076242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:36.812728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:36.812829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:36.853861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:43:36.973037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:36.988313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:36.995019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:37.002880Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:37.076261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:43:37.401900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:37.416212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:43:37.488625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:37.703808Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [0:0:0], ActorId: [1:7593712708051025218:2541], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-10T12:43:40.440969Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:40.455763Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976024689.095898s seconds to be completed 2026-01-10T12:43:40.473224Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, workerId: [1:7593712720935927165:2309], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:43:40.473490Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:43:40.473719Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:43:40.473746Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:43:40.473767Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:40.476594Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7593712708051025218:2541], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, TxId: , text: SELECT 42 2026-01-10T12:43:40.493462Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7593712720935927165:2309] 2026-01-10T12:43:40.493526Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7593712720935927167:2555] 2026-01-10T12:43:40.959914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7593712699461089800:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:40.960018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:43:42.209342Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7593712720935927166:2310], selfId: [1:7593712703756057309:2266], source: [1:7593712720935927165:2309] 2026-01-10T12:43:42.210281Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7593712708051025218:2541], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, TxId: 2026-01-10T12:43:42.215350Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7593712708051025218:2541], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, TxId: 2026-01-10T12:43:42.224414Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=Njg0ODBlYzQtNDQyMjQwMzMtMmE1OTk2ZmItYWRjODZjYTM=, workerId: [1:7593712720935927165:2309], local sessions count: 0 2026-01-10T12:43:43.117238Z node 2 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593712732108481274:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:43.117317Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:43:43.158473Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:43.403419Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:43.403780Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7593712732108481234:2081] 1768049023116432 != 1768049023116435 2026-01-10T12:43:43.424351Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-10T12:43:43.442307Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:43.442378Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:43.526342Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:43.670389Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-10T12:43:43.820607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:43.839713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:43:43.850489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:43.936785Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7593712732108482102:2538], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-01-10T12:43:44.126288Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:47.560754Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:47.564912Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976024681.986730s seconds to be completed 2026-01-10T12:43:47.574254Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=2&id=N2I0NDgyZGQtYmVmMWIwODEtYmUzNGFjNmYtM2QwNWFiNDM=, workerId: [2:7593712749288351347:2309], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:43:47.574464Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:43:47.574559Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:43:47.574586Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:43:47.574603Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:47.577184Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7593712732108482102:2538], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2I0NDgyZGQtYmVmMWIwODEtYmUzNGFjNmYtM2QwNWFiNDM=, TxId: , text: DECLARE $k As Uint64; DECLARE $v As String; UPSERT INTO TestTable (Key, Value) VALUES ($k, $v) 2026-01-10T12:43:47.580372Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=2&id=N2I0NDgyZGQtYmVmMWIwODEtYmUzNGFjNmYtM2QwNWFiNDM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7593712749288351347:2309] 2026-01-10T12:43:47.580412Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7593712749288351349:2 ... , opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:54.812923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:43:54.821543Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:54.887786Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-01-10T12:43:55.201258Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:58.514631Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:58.518881Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976024671.032758s seconds to be completed 2026-01-10T12:43:58.530439Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=NWFjYTM5ZGMtNjNmZmUxYTYtYjE3ODUyMDQtNTM5YTYzYzA=, workerId: [4:7593712797726504251:2309], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-10T12:43:58.530702Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:43:58.530969Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-10T12:43:58.530993Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-10T12:43:58.531013Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-10T12:43:58.531895Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-10T12:43:58.533777Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], Start read next stream part 2026-01-10T12:43:58.543700Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekyyn65d3dcpwzexm9g1cer", Created new session, sessionId: ydb://session/3?node_id=4&id=ZmEwM2VlNjItNDdiZTgwMjYtZWEwMmMzYzYtNWQxNzMxNWI=, workerId: [4:7593712797726504254:2310], database: /dc-1, longSession: 0, local sessions count: 2 2026-01-10T12:43:58.544047Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekyyn65d3dcpwzexm9g1cer, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZmEwM2VlNjItNDdiZTgwMjYtZWEwMmMzYzYtNWQxNzMxNWI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7593712797726504254:2310] 2026-01-10T12:43:58.545962Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593712797726504256:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:58.546034Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:58.546272Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7593712797726504255:2552] 2026-01-10T12:43:58.547237Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593712797726504265:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:58.547347Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:58.547720Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7593712797726504270:2315], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:58.558124Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-10T12:43:58.579216Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7593712797726504272:2316], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-10T12:43:58.654014Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7593712797726504324:2589] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-10T12:43:59.207901Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7593712780546634170:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:43:59.208833Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-10T12:44:09.477170Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:44:09.477207Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:44:09.615347Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:44:09.619315Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:333: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], Cancel stream request 2026-01-10T12:44:09.619420Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712780546635004:2534], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NWFjYTM5ZGMtNjNmZmUxYTYtYjE3ODUyMDQtNTM5YTYzYzA=, TxId: 2026-01-10T12:44:09.621531Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NWFjYTM5ZGMtNjNmZmUxYTYtYjE3ODUyMDQtNTM5YTYzYzA=, workerId: [4:7593712797726504251:2309], local sessions count: 1 2026-01-10T12:44:09.621535Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-01-10T12:44:09.621677Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976024659.929953s seconds to be completed 2026-01-10T12:44:09.625758Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=ZDkwYmY5NGItNDYzZjUyYmEtYzUxZWJjMmEtZjhiYTE3Zjg=, workerId: [4:7593712844971144676:2347], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-10T12:44:09.626015Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-10T12:44:09.628567Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-10T12:44:09.628775Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], Start read next stream part 2026-01-10T12:44:09.631077Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kekyz00w8x8ajzsam6dh05xg", Created new session, sessionId: ydb://session/3?node_id=4&id=ZjJhNTRlMTYtZWI5ZmZkZTgtZjI5MmNlNWItZTFmM2E1ZWQ=, workerId: [4:7593712844971144679:2348], database: /dc-1, longSession: 0, local sessions count: 3 2026-01-10T12:44:09.631344Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kekyz00w8x8ajzsam6dh05xg, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZjJhNTRlMTYtZWI5ZmZkZTgtZjI5MmNlNWItZTFmM2E1ZWQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7593712844971144679:2348] 2026-01-10T12:44:09.631375Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7593712844971144680:2643] 2026-01-10T12:44:09.761104Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1768049049753, txId: 281474976715663] shutting down 2026-01-10T12:44:09.773976Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kekyz00w8x8ajzsam6dh05xg", Forwarded response to sender actor, requestId: 5, sender: [4:7593712844971144677:2641], selfId: [4:7593712780546634306:2192], source: [4:7593712844971144679:2348] 2026-01-10T12:44:09.774619Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=ZjJhNTRlMTYtZWI5ZmZkZTgtZjI5MmNlNWItZTFmM2E1ZWQ=, workerId: [4:7593712844971144679:2348], local sessions count: 2 2026-01-10T12:44:09.782726Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-10T12:44:09.792220Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], Start read next stream part 2026-01-10T12:44:09.792347Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-10T12:44:09.792467Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7593712844971144674:2641], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZDkwYmY5NGItNDYzZjUyYmEtYzUxZWJjMmEtZjhiYTE3Zjg=, TxId: 2026-01-10T12:44:09.793997Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=ZDkwYmY5NGItNDYzZjUyYmEtYzUxZWJjMmEtZjhiYTE3Zjg=, workerId: [4:7593712844971144676:2347], local sessions count: 1 |99.0%| [TM] {RESULT} ydb/library/query_actor/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/library/query_actor/ut/unittest >> TDqPqRdReadActorTests::CoordinatorChanged >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |99.0%| [TM] {RESULT} ydb/tests/example/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/example/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> TDqPqRdReadActorTests::Backpressure >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict >> 
DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> TopicSessionTests::BadDataSessionError [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TopicSessionTests::WrongFieldType >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:43:42.480428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:42.735198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:42.775548Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:42.776307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:42.776449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:43.792728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:43.792895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:43.800428Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768049018257455 != 1768049018257458 2026-01-10T12:43:43.820762Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:43.907839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:44.078966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:44.507142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:44.522934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:44.642820Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-10T12:43:44.642920Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-10T12:43:44.643887Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-10T12:43:44.783182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-10T12:43:44.783295Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-10T12:43:44.785488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-10T12:43:44.785622Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-10T12:43:44.786018Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-10T12:43:44.786195Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-10T12:43:44.786390Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-10T12:43:44.791559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:44.792622Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-10T12:43:44.793657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-10T12:43:44.793738Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-10T12:43:44.877322Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:43:44.878173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:43:44.880101Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-10T12:43:44.880475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:44.960503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:43:44.961317Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:44.961447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:44.967320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:43:44.967449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:43:44.967558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:43:44.970424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:44.970659Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:44.970776Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-10T12:43:44.981826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:45.052821Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:43:45.060588Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:45.060837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-10T12:43:45.060920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:43:45.060964Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:43:45.061005Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:45.061246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:45.061300Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:45.064834Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:43:45.064966Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:43:45.065070Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:43:45.065117Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:43:45.065272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:43:45.065353Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:43:45.065389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:43:45.065428Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:43:45.065483Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:43:45.065642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:45.065690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:45.065737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-10T12:43:45.065835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-10T12:43:45.065874Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:43:45.065982Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:43:45.066361Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:43:45.066432Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:43:45.072559Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transactio ... 1474976715661] at 72075186224037892 is DelayComplete 2026-01-10T12:44:21.338903Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2026-01-10T12:44:21.338938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2026-01-10T12:44:21.338970Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037892 on unit CompletedOperations 2026-01-10T12:44:21.339008Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037892 is Executed 2026-01-10T12:44:21.339036Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2026-01-10T12:44:21.339065Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037892 has finished 2026-01-10T12:44:21.339098Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:44:21.339129Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037892 2026-01-10T12:44:21.339160Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037892 has no attached operations 2026-01-10T12:44:21.339192Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037892 2026-01-10T12:44:21.350343Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:44:21.350430Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-10T12:44:21.350465Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037892 on unit CompleteOperation 2026-01-10T12:44:21.350531Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1648:3440], exec latency: 0 ms, propose latency: 1 ms 2026-01-10T12:44:21.350581Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-10T12:44:21.350752Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1648:3440] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 
marker# P12 2026-01-10T12:44:21.350801Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2026-01-10T12:44:21.350968Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1648:3440], Recipient [2:982:2833]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-10T12:44:21.351007Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-10T12:44:21.351084Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:982:2833], Recipient [2:982:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:44:21.351113Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:44:21.351177Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:44:21.351213Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:44:21.351250Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2026-01-10T12:44:21.351281Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2026-01-10T12:44:21.351312Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [72500:281474976715661] at 72075186224037890 2026-01-10T12:44:21.351349Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-10T12:44:21.351382Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-10T12:44:21.351413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2026-01-10T12:44:21.351443Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:44:21.351670Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Continue 2026-01-10T12:44:21.351698Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:44:21.351725Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-10T12:44:21.351750Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-10T12:44:21.351781Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:44:21.352228Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1680:3469], Recipient [2:982:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-10T12:44:21.352266Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing 
event TEvPrivate::TEvRegisterScanActor 2026-01-10T12:44:21.352471Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-10T12:44:21.352578Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-10T12:44:21.363288Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-10T12:44:21.363363Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037890 2026-01-10T12:44:21.363638Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:982:2833], Recipient [2:982:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:44:21.363684Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:44:21.363752Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-10T12:44:21.363790Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-10T12:44:21.363831Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for ReadTableScan 2026-01-10T12:44:21.363867Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-10T12:44:21.363905Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [72500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2026-01-10T12:44:21.363951Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-10T12:44:21.363989Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2026-01-10T12:44:21.364026Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2026-01-10T12:44:21.364057Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-10T12:44:21.364282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is DelayComplete 2026-01-10T12:44:21.364316Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2026-01-10T12:44:21.364348Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2026-01-10T12:44:21.364376Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompletedOperations 2026-01-10T12:44:21.364407Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-10T12:44:21.364430Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 
72075186224037890 executing on unit CompletedOperations 2026-01-10T12:44:21.364458Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037890 has finished 2026-01-10T12:44:21.364488Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:44:21.364520Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-10T12:44:21.364551Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-10T12:44:21.364582Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-10T12:44:21.382176Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:44:21.382272Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-10T12:44:21.382311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-10T12:44:21.382381Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1648:3440], exec latency: 1 ms, propose latency: 1 ms 2026-01-10T12:44:21.382430Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-10T12:44:21.382602Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1648:3440] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2026-01-10T12:44:21.382688Z node 2 :TX_PROXY INFO: datareq.cpp:834: Actor# [2:1648:3440] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_compaction/unittest >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TTopicReaderTests::TestRun_ReadOneMessage [FAIL] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> 
TopicSessionTests::WrongFieldType [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict |99.0%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... 
results_accumulator.log} >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |99.1%| [TM] {RESULT} ydb/tests/functional/wardens/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/wardens/py3test >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_canonical_requests.py::Test::test_essential [FAIL] >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> TopicSessionTests::ReadNonExistentTopic >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TopicSessionTests::SlowSession |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] |99.1%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/postgresql/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-10T12:44:17.735316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593712877882775946:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:44:17.735395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:44:17.982698Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:44:18.015757Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:44:18.529165Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:18.529410Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:44:18.624335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:18.809963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:44:18.810098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:44:18.811690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:44:18.811767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:44:18.854738Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:44:18.854887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:44:18.855829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:44:18.863219Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:44:18.864353Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:44:18.865734Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:44:18.865887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:18.957144Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:44:19.239466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002d4b/r3tmp/yandexX5FnGG.tmp 
2026-01-10T12:44:19.239485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002d4b/r3tmp/yandexX5FnGG.tmp 2026-01-10T12:44:19.240762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002d4b/r3tmp/yandexX5FnGG.tmp 2026-01-10T12:44:19.240854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:44:19.271656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:44:19.300658Z INFO: TTestServer started on Port 17714 GrpcPort 29319 PQClient connected to localhost:29319 === TenantModeEnabled() = 0 === Init PQ - start server on port 29319 2026-01-10T12:44:19.948668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:44:19.948861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:19.949074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:44:19.949091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:44:19.949291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:44:19.949337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:44:19.954651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:44:19.954812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:44:19.955006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:19.955044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:44:19.955057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 
281474976715657:0 ProgressState no shards to create, do next state 2026-01-10T12:44:19.955073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-10T12:44:19.961689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:44:19.961733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-10T12:44:19.961751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-10T12:44:19.961971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:19.962014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:44:19.962044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-10T12:44:19.964595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:19.964629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:19.964661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:44:19.964689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-10T12:44:19.969652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:44:19.972042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-10T12:44:19.972164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:44:19.974720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768049060022, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:44:19.974871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768049060022 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:44:19.974898Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:44:19.975219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-10T12:44:19.975254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-10T12:44:19.975397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:44:19.975441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:44:19.977974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:44:19.978024Z node 1 :FLAT_TX_SCHEMESHAR ... er_3_1_15076986534907163974_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 1768049090351, sizeLag# 280 2026-01-10T12:44:50.509055Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1TEvPartitionReady. Aval parts: 1 2026-01-10T12:44:50.509117Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 performing read request: guid# 59805653-1ebafe8c-290d4e88-d29c00fa, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-10T12:44:50.509274Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 59805653-1ebafe8c-290d4e88-d29c00fa 2026-01-10T12:44:50.511501Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1768049090351 CreateTimestampMS: 1768049090346 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1768049090380 CreateTimestampMS: 1768049090347 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1768049090384 CreateTimestampMS: 1768049090347 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551530 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2026-01-10T12:44:50.511660Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 3 2026-01-10T12:44:50.511702Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 59805653-1ebafe8c-290d4e88-d29c00fa has messages 1 2026-01-10T12:44:50.511785Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 read done: guid# 59805653-1ebafe8c-290d4e88-d29c00fa, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2026-01-10T12:44:50.511811Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 response to read: guid# 59805653-1ebafe8c-290d4e88-d29c00fa 2026-01-10T12:44:50.512055Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 Process answer. Aval parts: 0 2026-01-10T12:44:50.517970Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2026-01-10T12:44:50.518102Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2026-01-10T12:44:50.518391Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2026-01-10T12:44:50.518444Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] Returning serverBytesSize = 490 to budget 2026-01-10T12:44:50.518480Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2026-01-10T12:44:50.518759Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-10T12:44:50.520232Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-10T12:44:50.520313Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-10T12:44:50.520348Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-10T12:44:50.520396Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2026-01-10T12:44:50.520453Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:44:50.520609Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] Requesting status for partition stream id: 1 2026-01-10T12:44:50.520736Z :INFO: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] Closing read session. 
Close timeout: 0.000000s 2026-01-10T12:44:50.520791Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2026-01-10T12:44:50.520842Z :INFO: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 67 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:44:50.520982Z :NOTICE: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:44:50.521027Z :DEBUG: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] [] Abort session to cluster 2026-01-10T12:44:50.521614Z :NOTICE: [] [] [9ec13c2d-60f7b429-3b4816db-dba8176f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:44:50.525961Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2026-01-10T12:44:50.526027Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 grpc closed 2026-01-10T12:44:50.526067Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_15076986534907163974_v1 is DEAD 2026-01-10T12:44:50.528595Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_15076986534907163974_v1 2026-01-10T12:44:50.531226Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [3:7593713021727428693:2573] disconnected. 2026-01-10T12:44:50.531269Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [3:7593713021727428693:2573] disconnected; active server actors: 1 2026-01-10T12:44:50.531303Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [3:7593713021727428693:2573] client user disconnected session shared/user_3_1_15076986534907163974_v1 2026-01-10T12:44:50.601341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] state 'Ready' dataSize 824 rowCount 2 cpuUsage 0 2026-01-10T12:44:50.701807Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:44:50.701941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 46 shard idx 72057594046644480:2 data size 824 row count 2 2026-01-10T12:44:50.701986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037889 maps to shardIdx: 72057594046644480:2 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], pathId map=Cluster, is column=0, is olap=0, RowCount 2, DataSize 824 2026-01-10T12:44:50.702068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037889: SplitByLoadNotEnabledForTable 2026-01-10T12:44:50.703222Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:44:50.749686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 47] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-10T12:44:50.852511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:44:50.852645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 47 shard idx 72057594046644480:3 data size 0 row count 0 2026-01-10T12:44:50.852687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: 
TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 47], pathId map=Topics, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-10T12:44:50.852773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037890: SplitByLoadNotEnabledForTable 2026-01-10T12:44:50.853816Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-10T12:44:50.915251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-10T12:44:50.915283Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:44:50.993653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 48] state 'Ready' dataSize 656 rowCount 2 cpuUsage 0 2026-01-10T12:44:51.095081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-10T12:44:51.095216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 48 shard idx 72057594046644480:4 data size 656 row count 2 2026-01-10T12:44:51.095258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037891 maps to shardIdx: 72057594046644480:4 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 48], pathId map=Versions, is column=0, is olap=0, RowCount 2, DataSize 656 2026-01-10T12:44:51.095338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037891: SplitByLoadNotEnabledForTable 2026-01-10T12:44:51.099778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update |99.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:43:12.833666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:13.121213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:13.148318Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:13.149038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:13.149204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:14.090219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:14.090382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:14.098023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768048989321721 != 1768048989321724 2026-01-10T12:43:14.132653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:14.197228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:14.357291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:14.931850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:14.952799Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:15.118992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:15.299921Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:900:2774] 2026-01-10T12:43:15.300357Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:15.423994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:15.424222Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:15.427462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:43:15.427587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:43:15.427652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:43:15.434969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:15.435420Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:15.435546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2774] in generation 1 2026-01-10T12:43:15.436096Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-10T12:43:15.436344Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:15.455811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:15.455981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:15.457586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-10T12:43:15.457668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-10T12:43:15.457739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-10T12:43:15.458197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:15.458502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:15.458563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2776] in generation 1 2026-01-10T12:43:15.458944Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-10T12:43:15.459164Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:15.469496Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-10T12:43:15.469743Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:15.480739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:15.480872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:15.482450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-10T12:43:15.482546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-10T12:43:15.482630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-10T12:43:15.483051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:15.483237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:15.483308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-10T12:43:15.483901Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:15.484029Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:15.485584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-10T12:43:15.485662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-10T12:43:15.485711Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-10T12:43:15.486010Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:15.486108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:15.486170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-10T12:43:15.499356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:15.556316Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:43:15.559241Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:15.559484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-10T12:43:15.559530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:43:15.559575Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:43:15.559616Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:15.564373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:15.564475Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-10T12:43:15.564585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:15.564686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-10T12:43:15.564718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-10T12:43:15.564769Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-10T12:43:15.564810Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:43:15.565353Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:15.565401Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-10T12:43:15.565465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:15.565540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-10T12:43:15.565573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-10T12:43:15.565597Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-10T12:43:15.565625Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-10T12:43:15.565881Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:43:15.566032Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:43:15.566175Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:15.566223Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-10T12:43:15.566280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: mis ... qpBuffer::TEvError in ExecuteState, status: ABORTED send to: [6:1189:2935] from: [6:1188:2935] trace_id# 2026-01-10T12:44:55.904171Z node 6 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1264} ActorId: [6:1189:2935] TxId: 281474976715664. Ctx: { TraceId: 01kekz0d3c09fwfx15cbyezq2m, Database: , SessionId: ydb://session/3?node_id=6&id=MzIwNDY0YTctNzU2N2IxN2ItYjE2ODY3MjktMzM4OGY0ZWY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-10T12:44:55.904695Z node 6 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=6&id=MzIwNDY0YTctNzU2N2IxN2ItYjE2ODY3MjktMzM4OGY0ZWY=, ActorId: [6:1133:2935], ActorState: ExecuteState, LegacyTraceId: 01kekz0d3c09fwfx15cbyezq2m, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-10T12:44:55.905258Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [6:68:2115] Handle TEvExecuteKqpTransaction 2026-01-10T12:44:55.905312Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [6:68:2115] TxId# 281474976715665 ProcessProposeKqpTransaction 2026-01-10T12:44:55.905622Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [6:1205:2988], Recipient [6:970:2825]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:44:55.905666Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:44:55.905707Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [6:1203:2987], serverId# [6:1205:2988], sessionId# [0:0:0] ... generic readset: Decision: DECISION_ABORT 2026-01-10T12:44:55.906065Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [6:885:2765], Recipient [6:970:2825]: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:44:55.906103Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-10T12:44:55.906173Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2026-01-10T12:44:55.906256Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-10T12:44:55.906320Z node 6 :TX_DATASHARD TRACE: volatile_tx.cpp:884: Processed readset with decision 2 from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2026-01-10T12:44:55.906432Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-10T12:44:55.907120Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [6:1188:2935], Recipient [6:885:2765]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-10T12:44:55.907162Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-10T12:44:55.907431Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [6:885:2765], Recipient [6:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 
2026-01-10T12:44:55.907479Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:44:55.907561Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-10T12:44:55.907750Z node 6 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-10T12:44:55.907872Z node 6 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:44:55.907976Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-10T12:44:55.908051Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:44:55.908135Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:44:55.908189Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:44:55.908236Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:44:55.908349Z node 6 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3001/0 ImmediateWriteEdgeReplied# v3001/0 2026-01-10T12:44:55.908432Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-10T12:44:55.908484Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:44:55.908518Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:44:55.908574Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:44:55.908605Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:44:55.908636Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:44:55.908661Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:44:55.908685Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:44:55.908710Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:44:55.908758Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-10T12:44:55.908882Z node 6 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 
1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-10T12:44:55.908949Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-10T12:44:55.909034Z node 6 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:44:55.909125Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:44:55.909183Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:44:55.909262Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:44:55.909309Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:44:55.909351Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-10T12:44:55.909405Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:44:55.909473Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:44:55.909519Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:44:55.909596Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:44:55.909627Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:44:55.909659Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-10T12:44:55.909745Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:44:55.909822Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:44:55.909884Z node 6 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:44:55.909980Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:44:55.910604Z node 6 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715664; 2026-01-10T12:44:55.910836Z node 6 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3002 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-10T12:44:55.910938Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-10T12:44:55.911638Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [6:970:2825], Recipient [6:885:2765]: 
{TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-10T12:44:55.911688Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-10T12:44:55.911768Z node 6 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2026-01-10T12:44:55.913020Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [6:70:2117], Recipient [6:885:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_rs/unittest |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |99.1%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> TopicSessionTests::SlowSession [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/http_api/py3test >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> TopicSessionTests::TwoSessionsWithDifferentSchemes |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive [GOOD] >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |99.1%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> TopicSessionTests::RestartSessionIfQueryStopped >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-10T12:44:08.112835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593712841630809184:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:44:08.112942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:44:08.319191Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:44:08.354498Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7593712841073522187:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:44:08.354550Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:44:08.414441Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:44:08.852251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:08.857008Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-10T12:44:09.083709Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:09.128179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:44:09.168948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:44:09.169037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:44:09.368919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:44:09.369021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:44:09.373685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:44:09.373740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:44:09.418633Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:44:09.418808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:44:09.423771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:44:09.486729Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:44:09.489705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:44:09.563036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:44:09.684670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:44:10.283256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002d4e/r3tmp/yandexEi16c4.tmp 2026-01-10T12:44:10.283303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002d4e/r3tmp/yandexEi16c4.tmp 2026-01-10T12:44:10.286203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002d4e/r3tmp/yandexEi16c4.tmp 2026-01-10T12:44:10.286324Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:44:10.825505Z INFO: TTestServer started on Port 11421 GrpcPort 8713 PQClient connected to 
localhost:8713 === TenantModeEnabled() = 0 === Init PQ - start server on port 8713 2026-01-10T12:44:11.568289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:44:11.568446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:11.568629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:44:11.568666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:44:11.569429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:44:11.569502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:44:11.571825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:44:11.571957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:44:11.572117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:11.572151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:44:11.572165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:44:11.572175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:44:11.574559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:44:11.574583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:44:11.574601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:44:11.575835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:11.575878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:44:11.575902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:44:11.577773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:11.577821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:44:11.577841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:44:11.577880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:44:11.582428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:44:11.584618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:44:11.584722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:44:11.587072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768049051629, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:44:11.587203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768049051629 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:44:11.587231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:44:11.587611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:44:11.587655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 2814749767 ... 
etId: 72075186224037897 Generation: 1, pipe: [5:7593713079527306576:2588] 2026-01-10T12:45:04.610815Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_13934277665753573018_v1:1 with generation 1 2026-01-10T12:45:04.617102Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 WriteTimestampMS: 1768049104374 CreateTimestampMS: 1768049104368 SizeLag: 280 WriteTimestampEstimateMS: 1768049104590 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-10T12:45:04.617154Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2026-01-10T12:45:04.617221Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 sending to client partition status 2026-01-10T12:45:04.618042Z :INFO: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: (NULL) 2026-01-10T12:45:04.618565Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2026-01-10T12:45:04.618690Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2026-01-10T12:45:04.618736Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2026-01-10T12:45:04.618766Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2026-01-10T12:45:04.618826Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1768049104374, sizeLag# 280 2026-01-10T12:45:04.618840Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1TEvPartitionReady. 
Aval parts: 1 2026-01-10T12:45:04.618886Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 performing read request: guid# afd56721-3babf23b-34dfb675-44aafc3a, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-10T12:45:04.618972Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid afd56721-3babf23b-34dfb675-44aafc3a 2026-01-10T12:45:04.620898Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1768049104374 CreateTimestampMS: 1768049104368 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1768049104384 CreateTimestampMS: 1768049104368 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1768049104485 CreateTimestampMS: 1768049104369 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 4 WriteTimestampMS: 1768049104485 CreateTimestampMS: 1768049104369 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551408 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-10T12:45:04.621076Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-10T12:45:04.621115Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid afd56721-3babf23b-34dfb675-44aafc3a has messages 1 2026-01-10T12:45:04.621208Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 read done: guid# afd56721-3babf23b-34dfb675-44aafc3a, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 531 2026-01-10T12:45:04.621235Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 response to read: guid# afd56721-3babf23b-34dfb675-44aafc3a 2026-01-10T12:45:04.621410Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 Process answer. 
Aval parts: 0 2026-01-10T12:45:04.622327Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Got ReadResponse, serverBytesSize = 531, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428269 2026-01-10T12:45:04.622450Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428269 2026-01-10T12:45:04.622756Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2026-01-10T12:45:04.622801Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Returning serverBytesSize = 531 to budget 2026-01-10T12:45:04.622840Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] In ContinueReadingDataImpl, ReadSizeBudget = 531, ReadSizeServerDelta = 52428269 2026-01-10T12:45:04.623057Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-10T12:45:04.623211Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-10T12:45:04.623248Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-10T12:45:04.623268Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-10T12:45:04.623284Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-10T12:45:04.623317Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] The application data is transferred to the client. Number of messages 4, size 32 bytes 2026-01-10T12:45:04.623348Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:45:04.623448Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Requesting status for partition stream id: 1 2026-01-10T12:45:04.623920Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc read done: success# 1, data# { read_request { bytes_size: 531 } } 2026-01-10T12:45:04.624028Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 got read request: guid# 652778b8-db5714c3-16b86ff1-f639982 2026-01-10T12:45:04.624674Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2026-01-10T12:45:04.624796Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 sending to client partition status 2026-01-10T12:45:04.726577Z :INFO: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] Closing read session. Close timeout: 0.000000s 2026-01-10T12:45:04.726674Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2026-01-10T12:45:04.726724Z :INFO: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 146 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:45:04.726833Z :NOTICE: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:45:04.726887Z :DEBUG: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] [] Abort session to cluster 2026-01-10T12:45:04.727435Z :NOTICE: [] [] [1b3c77e9-7d2e9b0d-26ff2261-8db01eba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:45:04.733121Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713079527306574:2585] disconnected. 2026-01-10T12:45:04.733167Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713079527306574:2585] disconnected; active server actors: 1 2026-01-10T12:45:04.733190Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713079527306574:2585] client user disconnected session shared/user_5_1_13934277665753573018_v1 2026-01-10T12:45:04.731161Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc read done: success# 0, data# { } 2026-01-10T12:45:04.731194Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc read failed 2026-01-10T12:45:04.731217Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 grpc closed 2026-01-10T12:45:04.731251Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_13934277665753573018_v1 is DEAD 2026-01-10T12:45:04.737123Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_13934277665753573018_v1 |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.1%| [TA] $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> KqpTpch::Query22 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:43:15.031245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:15.054670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:15.349300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:15.374719Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:15.390898Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:15.391574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:15.391687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:15.395296Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:15.395684Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:15.395736Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:16.365402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:16.656395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:16.656574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:16.657693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:16.657854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:16.709451Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:43:16.721536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:16.722163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:16.905880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:43:16.933569Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... waiting for the first mediator step 2026-01-10T12:43:17.880051Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:17.882266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... found first step to be 1500 2026-01-10T12:43:18.246477Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 1500 ... waiting for the next mediator step ... found second step to be 2000 ... read step subscribe result: [1500, 2000] ... read step subscribe update: 3000 2026-01-10T12:43:19.412900Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 3000 ... read step subscribe result: [3000, 3000] ... read step subscribe update: 3500 ... read step subscribe update: 3500 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... read step subscribe update: 7000 ... 
read step subscribe result: [3000, 7000] 2026-01-10T12:43:22.509758Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936129] NodeDisconnected NodeId# 2 2026-01-10T12:43:22.509842Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-10T12:43:22.509879Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 2 2026-01-10T12:43:22.509911Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037968897] NodeDisconnected NodeId# 2 2026-01-10T12:43:22.509947Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-10T12:43:22.510057Z node 1 :HIVE WARN: hive_impl.cpp:824: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2026-01-10T12:43:22.510148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2026-01-10T12:43:22.510712Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:124:2099] ServerId# [1:1134:2699] TabletId# 72057594037932033 PipeClientId# [2:124:2099] 2026-01-10T12:43:22.511161Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:258:2138] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-10T12:43:22.512577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2026-01-10T12:43:22.521932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:22.554014Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:43:22.555300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 ... read step subscribe update: 12000 ... waiting for SysViewsRoster update finished 2026-01-10T12:43:38.008679Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:38.008994Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:38.026404Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:38.027996Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:38.031247Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:38.031946Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:38.032326Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:38.034833Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:38.035060Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:38.035208Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:38.598799Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:38.744068Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:38.744635Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:38.745403Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:38.745480Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:38.822511Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-10T12:43:38.823542Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:38.823944Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:38.947238Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:43:38.991246Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:39.932068Z node 3 :TX_COORDINATOR DEBUG: coordinator__last_step_subscriptions.cpp:52: Processing TEvSubscribeLastStep from [4:1495:2378] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2026-01-10T12:43:40.001155Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:40.001276Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:40.386452Z ... ATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:45:28.204958Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000000 step# 200 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... coordinator 72057594046316545 gen 2 is planning step 200 2026-01-10T12:45:28.215629Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000000 has been planned 2026-01-10T12:45:28.215757Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... 
blocking put [72057594046316545:2:10:1:24576:168:0] response ... blocking put [72057594046316545:2:10:0:0:69:0] response ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 250 ... starting a new coordinator instance ... waiting for migrated state 2026-01-10T12:45:28.250404Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-10T12:45:28.251033Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-10T12:45:28.269209Z node 20 :TX_COORDINATOR INFO: coordinator__init.cpp:120: tablet# 72057594046316545 CreateTxInit Complete 2026-01-10T12:45:28.272047Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted ... blocking state response from [20:531:2394] to [20:873:2744] LastSentStep: 150 LastAcquiredStep: 0 LastConfirmedStep: 150 ... unblocking put responses and requests 2026-01-10T12:45:28.272922Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000000 stepId# 200 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy 2026-01-10T12:45:28.274956Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:322: tablet# 72057594046382081 server# [20:538:2479] disconnnected 2026-01-10T12:45:28.275158Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:201: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:538:2479] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2026-01-10T12:45:28.306395Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594046382081 server# [20:880:2754] connected 2026-01-10T12:45:28.306687Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2026-01-10T12:45:28.306752Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:876:2752] Cookie# 1 CompleteStep# 150 LatestKnownStep# 150 SubjectiveTime# 102 Coordinator# 72057594046316545 2026-01-10T12:45:28.307072Z node 20 :TX_COORDINATOR NOTICE: coordinator_impl.cpp:412: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2026-01-10T12:45:28.307130Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 200, txid# 10000000 marker# C2 2026-01-10T12:45:28.307236Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2026-01-10T12:45:28.307297Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000011 step# 250 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 200 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-10T12:45:28.312765Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 200 2026-01-10T12:45:28.312858Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [200] transactions [1] 2026-01-10T12:45:28.313019Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2026-01-10T12:45:28.313181Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2026-01-10T12:45:28.313262Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 200 forTablet# 72057594047365120 txid# 10000000 marker# M3 2026-01-10T12:45:28.313364Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 200 2026-01-10T12:45:28.313416Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 200 2026-01-10T12:45:28.313525Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 200 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}} marker# M4 2026-01-10T12:45:28.313682Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-10T12:45:28.313868Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-10T12:45:28.314404Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:884:2757] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-10T12:45:28.314495Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 200 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... 
coordinator 72057594046316545 gen 3 is planning step 250 2026-01-10T12:45:28.326637Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000011 has been planned 2026-01-10T12:45:28.326759Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2026-01-10T12:45:28.327634Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 250, txid# 10000011 marker# C2 2026-01-10T12:45:28.327744Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000011 stepId# 250 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 250 PrevStep: 200 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-10T12:45:28.328066Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 250 2026-01-10T12:45:28.328148Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [250] transactions [1] 2026-01-10T12:45:28.328325Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2026-01-10T12:45:28.328487Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2026-01-10T12:45:28.328594Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 250 forTablet# 72057594047365120 txid# 10000011 marker# M3 2026-01-10T12:45:28.328705Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 250 2026-01-10T12:45:28.328761Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 250 2026-01-10T12:45:28.328864Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 250 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}} marker# M4 2026-01-10T12:45:28.328949Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 250 MediatorId# 72057594046382081 TabletID 72057594047365120} 2026-01-10T12:45:28.329034Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} 2026-01-10T12:45:28.329214Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# 
[20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 300 ... observed step: Step: 300 PrevStep: 250 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-10T12:45:28.340393Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 250 To# 300Steps: {{TCoordinatorStep step# 300 PrevStep# 250}}} marker# M1 2026-01-10T12:45:28.340454Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 300 2026-01-10T12:45:28.340494Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 300 2026-01-10T12:45:28.340545Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} 2026-01-10T12:45:28.340597Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} |99.2%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/coordinator/ut/unittest >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/tests/kikimr_tpch/unittest >> 
test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TopicSessionTests::WrongJson |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsHistogram::Ten_Mixed |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTopicReaderTests::TestRun_ReadMessages_With_Offset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> TopicSessionTests::WrongJson [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period [GOOD] >> TopicSessionTests::WrongJsonOffset >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BulkUpsert::BulkUpsert [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |99.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 1968 30% 1968 50% 1968 90% 1968 99% 1968 ms Write: 10% 4704 30% 4704 50% 4704 90% 4704 99% 4704 ms Write: 10% 6211 30% 6211 50% 6211 90% 6211 99% 6211 ms Write: 10% 5688 30% 5688 50% 5688 90% 5688 99% 5688 ms Write: 10% 6187 30% 6187 50% 6187 90% 6187 99% 6187 ms Write: 10% 7141 30% 7141 50% 7141 90% 7141 99% 7141 ms Write: 10% 11274 30% 11274 50% 11274 90% 11274 99% 11274 ms Write: 10% 11116 30% 11116 50% 11116 90% 11116 99% 11116 ms Write: 10% 14499 30% 14499 50% 14499 90% 14499 99% 14499 ms Write: 10% 15307 30% 15307 50% 15307 90% 15307 99% 15307 ms Write: 10% 18224 30% 18224 50% 18224 90% 18224 99% 18224 ms Write: 10% 17291 30% 17291 50% 17291 90% 17291 99% 17291 ms Write: 10% 18152 30% 18152 50% 18152 90% 18152 99% 18152 ms Write: 10% 19479 30% 19479 50% 19479 90% 19479 99% 19479 ms Write: 10% 19914 30% 19914 50% 19914 90% 19914 99% 19914 ms Write: 10% 22073 30% 22073 50% 22073 90% 22073 99% 22073 ms Write: 10% 20514 30% 20514 50% 20514 90% 20514 99% 20514 ms Write: 10% 19662 30% 19662 50% 19662 90% 19662 99% 19662 ms Write: 10% 17026 30% 17026 50% 17026 90% 17026 99% 17026 ms Write: 10% 18671 30% 18671 50% 18671 90% 18671 99% 18671 ms Write: 10% 18195 30% 18195 50% 18195 90% 18195 99% 18195 ms Write: 10% 17848 30% 17848 50% 17848 90% 17848 99% 17848 ms Write: 10% 12604 30% 12604 50% 12604 90% 12604 99% 12604 ms Write: 10% 13041 30% 13041 50% 13041 90% 13041 99% 13041 ms Write: 10% 18956 30% 18956 50% 18956 90% 18956 99% 18956 ms Write: 10% 12134 30% 12134 50% 12134 90% 12134 99% 12134 ms Write: 10% 11402 30% 11402 50% 11402 90% 11402 99% 11402 ms Write: 10% 16125 30% 16125 50% 16125 90% 16125 99% 16125 ms Write: 10% 11227 30% 11227 50% 11227 90% 11227 99% 11227 ms Write: 10% 16329 30% 16329 50% 16329 90% 16329 99% 16329 ms Write: 10% 10368 30% 10368 50% 10368 90% 10368 99% 10368 ms Write: 10% 3004 30% 3004 50% 3004 90% 3004 99% 3004 ms Write: 10% 6457 30% 6457 50% 6457 90% 6457 99% 6457 ms Write: 10% 2435 30% 2435 50% 2435 90% 2435 99% 2435 ms Write: 10% 2036 30% 2036 50% 2036 90% 2036 99% 2036 ms Write: 10% 14565 30% 14565 50% 14565 90% 14565 99% 14565 ms Write: 10% 17496 30% 17496 50% 17496 90% 17496 99% 17496 ms Write: 10% 4467 30% 4467 50% 4467 90% 4467 99% 4467 ms Write: 10% 15143 30% 15143 50% 15143 90% 15143 99% 15143 ms Write: 10% 19273 30% 19273 50% 19273 90% 19273 99% 19273 ms Write: 10% 5068 30% 5068 50% 5068 90% 5068 99% 5068 ms Write: 10% 5301 30% 5301 50% 5301 90% 5301 99% 5301 ms Write: 10% 4611 30% 4611 50% 4611 90% 4611 99% 4611 ms Write: 10% 15739 30% 15739 50% 15739 90% 15739 99% 15739 ms Write: 10% 11408 30% 11408 50% 11408 90% 11408 99% 11408 ms Write: 10% 4629 30% 4629 50% 4629 90% 4629 99% 4629 
ms Write: 10% 5702 30% 5702 50% 5702 90% 5702 99% 5702 ms Write: 10% 5978 30% 5978 50% 5978 90% 5978 99% 5978 ms Write: 10% 4623 30% 4623 50% 4623 90% 4623 99% 4623 ms Write: 10% 9469 30% 9469 50% 9469 90% 9469 99% 9469 ms Write: 10% 3178 30% 3178 50% 3178 90% 3178 99% 3178 ms Write: 10% 4161 30% 4161 50% 4161 90% 4161 99% 4161 ms Write: 10% 17622 30% 17622 50% 17622 90% 17622 99% 17622 ms Write: 10% 4260 30% 4260 50% 4260 90% 4260 99% 4260 ms Write: 10% 3896 30% 3896 50% 3896 90% 3896 99% 3896 ms Write: 10% 3638 30% 3638 50% 3638 90% 3638 99% 3638 ms Write: 10% 3785 30% 3785 50% 3785 90% 3785 99% 3785 ms Write: 10% 3268 30% 3268 50% 3268 90% 3268 99% 3268 ms Write: 10% 3328 30% 3328 50% 3328 90% 3328 99% 3328 ms Write: 10% 3311 30% 3311 50% 3311 90% 3311 99% 3311 ms Write: 10% 15016 30% 15016 50% 15016 90% 15016 99% 15016 ms Write: 10% 2757 30% 2757 50% 2757 90% 2757 99% 2757 ms Write: 10% 13317 30% 13317 50% 13317 90% 13317 99% 13317 ms Write: 10% 7915 30% 7915 50% 7915 90% 7915 99% 7915 ms Step 2. read write Write: 10% 3196 30% 3196 50% 3196 90% 3196 99% 3196 ms Write: 10% 6295 30% 6295 50% 6295 90% 6295 99% 6295 ms Write: 10% 10434 30% 10434 50% 10434 90% 10434 99% 10434 ms Write: 10% 15033 30% 15033 50% 15033 90% 15033 99% 15033 ms Write: 10% 17912 30% 17912 50% 17912 90% 17912 99% 17912 ms Write: 10% 19421 30% 19421 50% 19421 90% 19421 99% 19421 ms Write: 10% 20503 30% 20503 50% 20503 90% 20503 99% 20503 ms Write: 10% 16745 30% 16745 50% 16745 90% 16745 99% 16745 ms Write: 10% 20626 30% 20626 50% 20626 90% 20626 99% 20626 ms Write: 10% 21155 30% 21155 50% 21155 90% 21155 99% 21155 ms Write: 10% 19994 30% 19994 50% 19994 90% 19994 99% 19994 ms Write: 10% 20005 30% 20005 50% 20005 90% 20005 99% 20005 ms Write: 10% 21351 30% 21351 50% 21351 90% 21351 99% 21351 ms Write: 10% 19843 30% 19843 50% 19843 90% 19843 99% 19843 ms Write: 10% 20012 30% 20012 50% 20012 90% 20012 99% 20012 ms Write: 10% 20253 30% 20253 50% 20253 90% 20253 99% 20253 ms Write: 10% 11397 30% 11397 50% 11397 90% 11397 99% 11397 ms Write: 10% 19065 30% 19065 50% 19065 90% 19065 99% 19065 ms Write: 10% 11030 30% 11030 50% 11030 90% 11030 99% 11030 ms Write: 10% 10481 30% 10481 50% 10481 90% 10481 99% 10481 ms Write: 10% 8719 30% 8719 50% 8719 90% 8719 99% 8719 ms Write: 10% 19172 30% 19172 50% 19172 90% 19172 99% 19172 ms Write: 10% 16456 30% 16456 50% 16456 90% 16456 99% 16456 ms Write: 10% 19273 30% 19273 50% 19273 90% 19273 99% 19273 ms Write: 10% 9726 30% 9726 50% 9726 90% 9726 99% 9726 ms Write: 10% 10847 30% 10847 50% 10847 90% 10847 99% 10847 ms Write: 10% 13580 30% 13580 50% 13580 90% 13580 99% 13580 ms Write: 10% 15949 30% 15949 50% 15949 90% 15949 99% 15949 ms Write: 10% 8585 30% 8585 50% 8585 90% 8585 99% 8585 ms Write: 10% 8957 30% 8957 50% 8957 90% 8957 99% 8957 ms Write: 10% 13094 30% 13094 50% 13094 90% 13094 99% 13094 ms Write: 10% 15569 30% 15569 50% 15569 90% 15569 99% 15569 ms Write: 10% 16512 30% 16512 50% 16512 90% 16512 99% 16512 ms Write: 10% 12432 30% 12432 50% 12432 90% 12432 99% 12432 ms Write: 10% 16732 30% 16732 50% 16732 90% 16732 99% 16732 ms Write: 10% 21333 30% 21333 50% 21333 90% 21333 99% 21333 ms Write: 10% 21677 30% 21677 50% 21677 90% 21677 99% 21677 ms Write: 10% 10627 30% 10627 50% 10627 90% 10627 99% 10627 ms Write: 10% 11595 30% 11595 50% 11595 90% 11595 99% 11595 ms Write: 10% 19529 30% 19529 50% 19529 90% 19529 99% 19529 ms Write: 10% 4303 30% 4303 50% 4303 90% 4303 99% 4303 ms Write: 10% 5534 30% 5534 50% 5534 90% 5534 99% 5534 ms Write: 10% 10812 30% 10812 50% 10812 90% 10812 
99% 10812 ms Write: 10% 4554 30% 4554 50% 4554 90% 4554 99% 4554 ms Write: 10% 7140 30% 7140 50% 7140 90% 7140 99% 7140 ms Write: 10% 9566 30% 9566 50% 9566 90% 9566 99% 9566 ms Write: 10% 4316 30% 4316 50% 4316 90% 4316 99% 4316 ms Write: 10% 8781 30% 8781 50% 8781 90% 8781 99% 8781 ms Write: 10% 5157 30% 5157 50% 5157 90% 5157 99% 5157 ms Write: 10% 3520 30% 3520 50% 3520 90% 3520 99% 3520 ms Write: 10% 6772 30% 6772 50% 6772 90% 6772 99% 6772 ms Write: 10% 6976 30% 6976 50% 6976 90% 6976 99% 6976 ms Write: 10% 3410 30% 3410 50% 3410 90% 3410 99% 3410 ms Write: 10% 9400 30% 9400 50% 9400 90% 9400 99% 9400 ms Write: 10% 4887 30% 4887 50% 4887 90% 4887 99% 4887 ms Write: 10% 4217 30% 4217 50% 4217 90% 4217 99% 4217 ms Write: 10% 4187 30% 4187 50% 4187 90% 4187 99% 4187 ms Write: 10% 8290 30% 8290 50% 8290 90% 8290 99% 8290 ms Write: 10% 6146 30% 6146 50% 6146 90% 6146 99% 6146 ms Write: 10% 4995 30% 4995 50% 4995 90% 4995 99% 4995 ms Write: 10% 5839 30% 5839 50% 5839 90% 5839 99% 5839 ms Write: 10% 5101 30% 5101 50% 5101 90% 5101 99% 5101 ms Write: 10% 5458 30% 5458 50% 5458 90% 5458 99% 5458 ms Write: 10% 8503 30% 8503 50% 8503 90% 8503 99% 8503 ms Read: 10% 8561 30% 13258 50% 17954 90% 27348 99% 29461 ms Step 3. write modify Was written: 14.6484375 MiB, Speed: 0.244140625 MiB/s Write: 10% 11740 30% 11740 50% 11740 90% 11740 99% 11740 ms Write: 10% 16167 30% 16167 50% 16167 90% 16167 99% 16167 ms Write: 10% 20641 30% 20641 50% 20641 90% 20641 99% 20641 ms Write: 10% 22250 30% 22250 50% 22250 90% 22250 99% 22250 ms Write: 10% 21358 30% 21358 50% 21358 90% 21358 99% 21358 ms Write: 10% 21011 30% 21011 50% 21011 90% 21011 99% 21011 ms Write: 10% 20824 30% 20824 50% 20824 90% 20824 99% 20824 ms Write: 10% 21272 30% 21272 50% 21272 90% 21272 99% 21272 ms Write: 10% 21558 30% 21558 50% 21558 90% 21558 99% 21558 ms Write: 10% 20545 30% 20545 50% 20545 90% 20545 99% 20545 ms Write: 10% 21273 30% 21273 50% 21273 90% 21273 99% 21273 ms Write: 10% 20177 30% 20177 50% 20177 90% 20177 99% 20177 ms Write: 10% 20126 30% 20126 50% 20126 90% 20126 99% 20126 ms Write: 10% 19470 30% 19470 50% 19470 90% 19470 99% 19470 ms Write: 10% 6017 30% 6017 50% 6017 90% 6017 99% 6017 ms Write: 10% 4276 30% 4276 50% 4276 90% 4276 99% 4276 ms Write: 10% 14318 30% 14318 50% 14318 90% 14318 99% 14318 ms Write: 10% 17717 30% 17717 50% 17717 90% 17717 99% 17717 ms Write: 10% 11355 30% 11355 50% 11355 90% 11355 99% 11355 ms Write: 10% 17881 30% 17881 50% 17881 90% 17881 99% 17881 ms Write: 10% 17584 30% 17584 50% 17584 90% 17584 99% 17584 ms Write: 10% 11323 30% 11323 50% 11323 90% 11323 99% 11323 ms Write: 10% 9735 30% 9735 50% 9735 90% 9735 99% 9735 ms Write: 10% 6939 30% 6939 50% 6939 90% 6939 99% 6939 ms Write: 10% 15451 30% 15451 50% 15451 90% 15451 99% 15451 ms Write: 10% 14958 30% 14958 50% 14958 90% 14958 99% 14958 ms Write: 10% 6467 30% 6467 50% 6467 90% 6467 99% 6467 ms Write: 10% 19035 30% 19035 50% 19035 90% 19035 99% 19035 ms Write: 10% 14218 30% 14218 50% 14218 90% 14218 99% 14218 ms Write: 10% 6220 30% 6220 50% 6220 90% 6220 99% 6220 ms Write: 10% 5059 30% 5059 50% 5059 90% 5059 99% 5059 ms Write: 10% 18833 30% 18833 50% 18833 90% 18833 99% 18833 ms Write: 10% 9578 30% 9578 50% 9578 90% 9578 99% 9578 ms Write: 10% 4848 30% 4848 50% 4848 90% 4848 99% 4848 ms Write: 10% 11797 30% 11797 50% 11797 90% 11797 99% 11797 ms Write: 10% 11624 30% 11624 50% 11624 90% 11624 99% 11624 ms Write: 10% 13862 30% 13862 50% 13862 90% 13862 99% 13862 ms Write: 10% 19931 30% 19931 50% 19931 90% 19931 99% 19931 ms Write: 10% 16979 
30% 16979 50% 16979 90% 16979 99% 16979 ms Write: 10% 10276 30% 10276 50% 10276 90% 10276 99% 10276 ms Write: 10% 16039 30% 16039 50% 16039 90% 16039 99% 16039 ms Write: 10% 3857 30% 3857 50% 3857 90% 3857 99% 3857 ms Write: 10% 9709 30% 9709 50% 9709 90% 9709 99% 9709 ms Write: 10% 8813 30% 8813 50% 8813 90% 8813 99% 8813 ms Write: 10% 8259 30% 8259 50% 8259 90% 8259 99% 8259 ms Write: 10% 16583 30% 16583 50% 16583 90% 16583 99% 16583 ms Write: 10% 9454 30% 9454 50% 9454 90% 9454 99% 9454 ms Write: 10% 4037 30% 4037 50% 4037 90% 4037 99% 4037 ms Write: 10% 5216 30% 5216 50% 5216 90% 5216 99% 5216 ms Write: 10% 19305 30% 19305 50% 19305 90% 19305 99% 19305 ms Write: 10% 5691 30% 5691 50% 5691 90% 5691 99% 5691 ms Write: 10% 5312 30% 5312 50% 5312 90% 5312 99% 5312 ms Write: 10% 13974 30% 13974 50% 13974 90% 13974 99% 13974 ms Write: 10% 5415 30% 5415 50% 5415 90% 5415 99% 5415 ms Write: 10% 4735 30% 4735 50% 4735 90% 4735 99% 4735 ms Write: 10% 4319 30% 4319 50% 4319 90% 4319 99% 4319 ms Write: 10% 4426 30% 4426 50% 4426 90% 4426 99% 4426 ms Write: 10% 5173 30% 5173 50% 5173 90% 5173 99% 5173 ms Write: 10% 4386 30% 4386 50% 4386 90% 4386 99% 4386 ms Write: 10% 9305 30% 9305 50% 9305 90% 9305 99% 9305 ms Write: 10% 4264 30% 4264 50% 4264 90% 4264 99% 4264 ms Write: 10% 5447 30% 5447 50% 5447 90% 5447 99% 5447 ms Write: 10% 6609 30% 6609 50% 6609 90% 6609 99% 6609 ms Write: 10% 5294 30% 5294 50% 5294 90% 5294 99% 5294 ms Update: 10% 1850 30% 1982 50% 2114 90% 2378 99% 2437 ms Step 4. read modify write Write: 10% 11160 30% 11160 50% 11160 90% 11160 99% 11160 ms Write: 10% 13532 30% 13532 50% 13532 90% 13532 99% 13532 ms Write: 10% 12770 30% 12770 50% 12770 90% 12770 99% 12770 ms Write: 10% 17142 30% 17142 50% 17142 90% 17142 99% 17142 ms Write: 10% 17972 30% 17972 50% 17972 90% 17972 99% 17972 ms Write: 10% 18390 30% 18390 50% 18390 90% 18390 99% 18390 ms Write: 10% 19019 30% 19019 50% 19019 90% 19019 99% 19019 ms Write: 10% 17455 30% 17455 50% 17455 90% 17455 99% 17455 ms Write: 10% 16165 30% 16165 50% 16165 90% 16165 99% 16165 ms Write: 10% 19529 30% 19529 50% 19529 90% 19529 99% 19529 ms Write: 10% 18802 30% 18802 50% 18802 90% 18802 99% 18802 ms Write: 10% 13915 30% 13915 50% 13915 90% 13915 99% 13915 ms Write: 10% 12039 30% 12039 50% 12039 90% 12039 99% 12039 ms Was written: 25.0 MiB, Speed: 0.17252604166666666 MiB/s Write: 10% 14694 30% 14694 50% 14694 90% 14694 99% 14694 ms Write: 10% 16091 30% 16091 50% 16091 90% 16091 99% 16091 ms Write: 10% 16066 30% 16066 50% 16066 90% 16066 99% 16066 ms Write: 10% 17361 30% 17361 50% 17361 90% 17361 99% 17361 ms Write: 10% 15607 30% 15607 50% 15607 90% 15607 99% 15607 ms Write: 10% 15036 30% 15036 50% 15036 90% 15036 99% 15036 ms Write: 10% 17408 30% 17408 50% 17408 90% 17408 99% 17408 ms Write: 10% 14894 30% 14894 50% 14894 90% 14894 99% 14894 ms Write: 10% 5623 30% 5623 50% 5623 90% 5623 99% 5623 ms Write: 10% 6307 30% 6307 50% 6307 90% 6307 99% 6307 ms Write: 10% 13699 30% 13699 50% 13699 90% 13699 99% 13699 ms Write: 10% 6810 30% 6810 50% 6810 90% 6810 99% 6810 ms Write: 10% 17032 30% 17032 50% 17032 90% 17032 99% 17032 ms Write: 10% 4794 30% 4794 50% 4794 90% 4794 99% 4794 ms Write: 10% 5323 30% 5323 50% 5323 90% 5323 99% 5323 ms Write: 10% 16074 30% 16074 50% 16074 90% 16074 99% 16074 ms Write: 10% 5737 30% 5737 50% 5737 90% 5737 99% 5737 ms Write: 10% 14341 30% 14341 50% 14341 90% 14341 99% 14341 ms Write: 10% 4369 30% 4369 50% 4369 90% 4369 99% 4369 ms Write: 10% 12935 30% 12935 50% 12935 90% 12935 99% 12935 ms Write: 10% 3892 30% 3892 50% 
3892 90% 3892 99% 3892 ms Write: 10% 12029 30% 12029 50% 12029 90% 12029 99% 12029 ms Write: 10% 7876 30% 7876 50% 7876 90% 7876 99% 7876 ms Write: 10% 12747 30% 12747 50% 12747 90% 12747 99% 12747 ms Write: 10% 11597 30% 11597 50% 11597 90% 11597 99% 11597 ms Write: 10% 10482 30% 10482 50% 10482 90% 10482 99% 10482 ms Write: 10% 8929 30% 8929 50% 8929 90% 8929 99% 8929 ms Write: 10% 4013 30% 4013 50% 4013 90% 4013 99% 4013 ms Write: 10% 8721 30% 8721 50% 8721 90% 8721 99% 8721 ms Write: 10% 9098 30% 9098 50% 9098 90% 9098 99% 9098 ms Write: 10% 7918 30% 7918 50% 7918 90% 7918 99% 7918 ms Write: 10% 7269 30% 7269 50% 7269 90% 7269 99% 7269 ms Write: 10% 6859 30% 6859 50% 6859 90% 6859 99% 6859 ms Write: 10% 7978 30% 7978 50% 7978 90% 7978 99% 7978 ms Write: 10% 8274 30% 8274 50% 8274 90% 8274 99% 8274 ms Write: 10% 5752 30% 5752 50% 5752 90% 5752 99% 5752 ms Write: 10% 6300 30% 6300 50% 6300 90% 6300 99% 6300 ms Write: 10% 6710 30% 6710 50% 6710 90% 6710 99% 6710 ms Write: 10% 13985 30% 13985 50% 13985 90% 13985 99% 13985 ms Write: 10% 14582 30% 14582 50% 14582 90% 14582 99% 14582 ms Write: 10% 5197 30% 5197 50% 5197 90% 5197 99% 5197 ms Write: 10% 14036 30% 14036 50% 14036 90% 14036 99% 14036 ms Write: 10% 13533 30% 13533 50% 13533 90% 13533 99% 13533 ms Write: 10% 4487 30% 4487 50% 4487 90% 4487 99% 4487 ms Write: 10% 4736 30% 4736 50% 4736 90% 4736 99% 4736 ms Write: 10% 4492 30% 4492 50% 4492 90% 4492 99% 4492 ms Write: 10% 6698 30% 6698 50% 6698 90% 6698 99% 6698 ms Write: 10% 7632 30% 7632 50% 7632 90% 7632 99% 7632 ms Write: 10% 5122 30% 5122 50% 5122 90% 5122 99% 5122 ms Write: 10% 6317 30% 6317 50% 6317 90% 6317 99% 6317 ms Write: 10% 4871 30% 4871 50% 4871 90% 4871 99% 4871 ms Read: 10% 7653 30% 12080 50% 16508 90% 25363 99% 27355 ms Update: 10% 1459 30% 1459 50% 1459 90% 1459 99% 1459 ms |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |99.3%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> TopicSessionTests::WrongJsonOffset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::TwoClientOneSession [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> 
RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-10T12:45:12.357700Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7593713115774538511:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-10T12:45:12.357972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-10T12:45:12.392218Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007100s 2026-01-10T12:45:12.441179Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:45:12.457808Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-10T12:45:12.752569Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:45:12.752791Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:45:12.774358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-10T12:45:12.794746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:45:12.794888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:45:12.801545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:45:12.801728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:45:12.811751Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-10T12:45:12.811951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:45:12.824429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:45:12.934678Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:45:12.959323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:45:13.070506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-10T12:45:13.133083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/d9xi/002c43/r3tmp/yandexZbGGIQ.tmp 2026-01-10T12:45:13.133110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/d9xi/002c43/r3tmp/yandexZbGGIQ.tmp 2026-01-10T12:45:13.133315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/d9xi/002c43/r3tmp/yandexZbGGIQ.tmp 2026-01-10T12:45:13.133766Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-10T12:45:13.213221Z INFO: TTestServer started on Port 12459 GrpcPort 28428 2026-01-10T12:45:13.377025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:45:13.445219Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:28428 === TenantModeEnabled() = 0 === Init PQ - start server on port 28428 2026-01-10T12:45:13.699262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-10T12:45:13.699444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:45:13.699621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-10T12:45:13.699640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-10T12:45:13.699788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-10T12:45:13.699824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:45:13.708874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-10T12:45:13.709011Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-10T12:45:13.709159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:45:13.709198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-10T12:45:13.709212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-10T12:45:13.709228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-10T12:45:13.724570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:45:13.724612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-10T12:45:13.724629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-10T12:45:13.732372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:45:13.732424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-10T12:45:13.732447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-10T12:45:13.736896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:45:13.736930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-10T12:45:13.736960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:45:13.737000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-10T12:45:13.746946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-10T12:45:13.749485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-10T12:45:13.749594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-10T12:45:13.754137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1768049113796, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-10T12:45:13.754278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1768049113796 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-10T12:45:13.754308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:45:13.754570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-10T12:45:13.754600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-10T12:45:13.754734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-10T12:45:13.754773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-10T12:45:13.760825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-10T12:45:13.760855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 720575940466444 ... UG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2026-01-10T12:45:56.581872Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:941: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 read init: from# ipv6:[::1]:46442, request# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2026-01-10T12:45:56.582448Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 auth for : user 2026-01-10T12:45:56.583172Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 Handle describe topics response 2026-01-10T12:45:56.583297Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 auth is DEAD 2026-01-10T12:45:56.583318Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 auth ok: topics# 1, initDone# 0 2026-01-10T12:45:56.585894Z :INFO: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] [] Got InitResponse. 
ReadSessionId: shared/user_5_1_2334794710398483937_v1 2026-01-10T12:45:56.585942Z :DEBUG: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:45:56.586142Z :DEBUG: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-10T12:45:56.587445Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713306979546074:2575] connected; active server actors: 1 2026-01-10T12:45:56.584716Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 register session: topic# rt3.dc1--topic1 2026-01-10T12:45:56.588014Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037898][rt3.dc1--topic1] consumer "user" register session for pipe [5:7593713306979546074:2575] session shared/user_5_1_2334794710398483937_v1 2026-01-10T12:45:56.588090Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037898][rt3.dc1--topic1] consumer user register readable partition 0 2026-01-10T12:45:56.588147Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037898][rt3.dc1--topic1] consumer user family created family=1 (Status=Free, Partitions=[0]) 2026-01-10T12:45:56.588194Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037898][rt3.dc1--topic1] consumer user register reading session ReadingSession "shared/user_5_1_2334794710398483937_v1" (Sender=[5:7593713306979546071:2575], Pipe=[5:7593713306979546074:2575], Partitions=[], ActiveFamilyCount=0) 2026-01-10T12:45:56.588222Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037898][rt3.dc1--topic1] consumer user rebalancing was scheduled 2026-01-10T12:45:56.588271Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037898][rt3.dc1--topic1] consumer user balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-10T12:45:56.588323Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037898][rt3.dc1--topic1] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_5_1_2334794710398483937_v1" (Sender=[5:7593713306979546071:2575], Pipe=[5:7593713306979546074:2575], Partitions=[], ActiveFamilyCount=0) 2026-01-10T12:45:56.588383Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037898][rt3.dc1--topic1] consumer user family 1 status Active partitions [0] session "shared/user_5_1_2334794710398483937_v1" sender [5:7593713306979546071:2575] lock partition 0 for ReadingSession "shared/user_5_1_2334794710398483937_v1" (Sender=[5:7593713306979546071:2575], Pipe=[5:7593713306979546074:2575], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-10T12:45:56.588442Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037898][rt3.dc1--topic1] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-10T12:45:56.588472Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037898][rt3.dc1--topic1] consumer user balancing duration: 0.000179s 2026-01-10T12:45:56.588466Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2026-01-10T12:45:56.588595Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 got read request: guid# 796e057e-4adf7a98-f8959dae-cb8288a4 2026-01-10T12:45:56.589672Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/user_5_1_2334794710398483937_v1" ClientId: "user" PipeClient { RawX1: 7593713306979546074 RawX2: 4503621102209551 } Path: "/Root/PQ/rt3.dc1--topic1" } 2026-01-10T12:45:56.589787Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2026-01-10T12:45:56.590214Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7593713306979546076:2578] 2026-01-10T12:45:56.591157Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_2334794710398483937_v1:1 with generation 1 2026-01-10T12:45:56.593713Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 6 WriteTimestampMS: 1768049156461 CreateTimestampMS: 1768049156458 SizeLag: 280 WriteTimestampEstimateMS: 1768049156567 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-10T12:45:56.593762Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 2026-01-10T12:45:56.593831Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 sending to client partition status 2026-01-10T12:45:56.594538Z :INFO: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: 10 2026-01-10T12:45:56.595189Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 10 } } 2026-01-10T12:45:56.595320Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 10, commitOffset# (empty maybe) 2026-01-10T12:45:56.595370Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 10 2026-01-10T12:45:56.595421Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 10 2026-01-10T12:45:56.674023Z :INFO: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] Closing read session. Close timeout: 0.000000s 2026-01-10T12:45:56.674121Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2026-01-10T12:45:56.674186Z :INFO: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] Counters: { Errors: 0 CurrentSessionLifetimeMs: 105 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:45:56.674335Z :NOTICE: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:45:56.674405Z :DEBUG: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] [] Abort session to cluster 2026-01-10T12:45:56.675005Z :NOTICE: [] [] [9a04e146-b52d8a7a-73b7830a-c465e815] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:45:56.679800Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 grpc read done: success# 0, data# { } 2026-01-10T12:45:56.679827Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 grpc read failed 2026-01-10T12:45:56.679856Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 grpc closed 2026-01-10T12:45:56.679895Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_2334794710398483937_v1 is DEAD 2026-01-10T12:45:56.680247Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_2334794710398483937_v1 2026-01-10T12:45:56.683582Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713306979546074:2575] disconnected. 2026-01-10T12:45:56.683622Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713306979546074:2575] disconnected; active server actors: 1 2026-01-10T12:45:56.683644Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7593713306979546074:2575] client user disconnected session shared/user_5_1_2334794710398483937_v1 2026-01-10T12:45:57.238826Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] state 'Ready' dataSize 824 rowCount 2 cpuUsage 0 |99.3%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> RowDispatcherTests::TwoClients4Sessions |99.3%| [TA] $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> RowDispatcherTests::TwoClientTwoConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> RowDispatcherTests::SessionFatalError |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> RowDispatcherTests::SessionFatalError [GOOD] |99.3%| [TA] {RESULT} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} >> test_canonical_requests.py::Test::test_cluster_get_set |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::SessionFatalError [GOOD] Test command err: 2026-01-10T12:43:19.927000Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [1:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-10T12:43:19.927398Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-10T12:43:19.927480Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [1:25:2054] 2026-01-10T12:43:19.927534Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [1:25:2054] 2026-01-10T12:43:19.927568Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [1:25:2054] 2026-01-10T12:43:19.927607Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:26:2054] 2026-01-10T12:43:19.927628Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:26:2054] 2026-01-10T12:43:19.927667Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:19.927703Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:26:2054] 2026-01-10T12:43:19.927736Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [3:27:2054] 2026-01-10T12:43:19.927761Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [3:27:2054] 2026-01-10T12:43:19.927783Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:19.927803Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-10T12:43:19.927829Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [3:27:2054] 2026-01-10T12:43:19.927956Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-10T12:43:19.928105Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-10T12:43:19.929115Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2026-01-10T12:43:19.929182Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-10T12:43:19.946783Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-10T12:43:19.946916Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-10T12:43:19.947070Z node 1 
:FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:32:2055] 2026-01-10T12:43:19.947109Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:32:2055] 2026-01-10T12:43:19.947155Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:32:2055] 2026-01-10T12:43:19.947203Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:32:2055] 2026-01-10T12:43:19.947266Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:33:2056] 2026-01-10T12:43:19.947303Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:33:2056] 2026-01-10T12:43:19.947355Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:32:2055] to new [2:33:2056] 2026-01-10T12:43:19.947383Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:33:2056] 2026-01-10T12:43:19.947456Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-10T12:43:19.947506Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-10T12:43:19.947637Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-10T12:43:19.947706Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-10T12:43:20.177726Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [5:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-10T12:43:20.178199Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-10T12:43:20.178259Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [5:25:2054] 2026-01-10T12:43:20.178298Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [5:25:2054] 2026-01-10T12:43:20.178329Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [5:25:2054] 2026-01-10T12:43:20.178758Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [6:26:2054] 2026-01-10T12:43:20.178789Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [6:26:2054] 2026-01-10T12:43:20.178821Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:20.178849Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [6:26:2054] 2026-01-10T12:43:20.178882Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [7:27:2054] 2026-01-10T12:43:20.178929Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [7:27:2054] 2026-01-10T12:43:20.178962Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:20.178984Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-10T12:43:20.179010Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [7:27:2054] 
2026-01-10T12:43:20.179109Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2026-01-10T12:43:20.179237Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2026-01-10T12:43:20.179401Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2026-01-10T12:43:20.179473Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2026-01-10T12:43:20.387485Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [9:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-10T12:43:20.397193Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-10T12:43:20.397287Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [10:26:2054] 2026-01-10T12:43:20.397327Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [10:26:2054] 2026-01-10T12:43:20.397359Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:20.397408Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [10:26:2054] 2026-01-10T12:43:20.397517Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [9:28:2055], topic1, partIds: 0 2026-01-10T12:43:20.397616Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:514: Coordinator: Not all nodes connected, nodes count: 3, known rd count: 2, add request into pending queue 2026-01-10T12:43:21.398421Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [11:27:2054] 2026-01-10T12:43:21.398520Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [11:27:2054] 2026-01-10T12:43:21.398557Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:21.398610Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-10T12:43:21.398743Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [9:28:2055] 2026-01-10T12:43:21.398811Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [11:27:2054] 2026-01-10T12:43:21.670442Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [13:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-10T12:43:21.670766Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-10T12:43:21.670858Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [13:25:2054] 2026-01-10T12:43:21.670884Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [13:25:2054] 2026-01-10T12:43:21.670921Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [13:25:2054] 2026-01-10T12:43:21.670962Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [14:26:2054] 2026-01-10T12:43:21.670976Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: 
Coordinator: UpdateKnownRowDispatchers [14:26:2054] 2026-01-10T12:43:21.670996Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:21.671030Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [14:26:2054] 2026-01-10T12:43:21.671050Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [15:27:2054] 2026-01-10T12:43:21.671061Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [15:27:2054] 2026-01-10T12:43:21.671073Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:43:21.671087Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-10T12:43:21.671114Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [15:27:2054] 2026-01-10T12:43:21.671198Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-10T12:43:21.671271Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-10T12:43:21.671465Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-10T12:43:21.671565Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-10T12:43:21.794749Z node 17 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [17:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-10T12:43:21.795574Z node 17 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 1, AssignedNodes: 0 2026-01-10T12:43:21.795627Z node 17 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-10T12:43:21.795670Z ... topic part id 100 query id QueryId cookie 42 2026-01-10T12:46:02.169438Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-10T12:46:02.173241Z node 60 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: C-ares status is not ARES_SUCCESS qtype=A name=YDB_ENDPOINT is_balancer=0: DNS server returned general failure } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2026-01-10T12:46:02.174454Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1052: RowDispatcher: TEvTryConnect to node id 61 2026-01-10T12:46:02.180908Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:587: RowDispatcher: EvNodeConnected, node id 61 2026-01-10T12:46:02.181877Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-10T12:46:02.182334Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-10T12:46:02.182492Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-10T12:46:02.182875Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 41 2026-01-10T12:46:02.182988Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-10T12:46:02.183391Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-10T12:46:02.183553Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-10T12:46:02.183891Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 42 2026-01-10T12:46:02.183964Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [61:16:2053] query id QueryId, partitions size 1 2026-01-10T12:46:02.184145Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [60:22:2063] 2026-01-10T12:46:02.458650Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [62:17:2058], tenant Tenant 2026-01-10T12:46:02.458787Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [62:17:2058] 2026-01-10T12:46:02.458848Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:46:02.459110Z node 62 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [62:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-10T12:46:02.459179Z node 62 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [62:19:2060] Successfully bootstrapped, local coordinator id [62:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-10T12:46:02.473977Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-10T12:46:02.474058Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-10T12:46:02.474097Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-10T12:46:02.474477Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [62:18:2059] 
2026-01-10T12:46:02.475163Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [62:14:2056], read group connection_id1, topicPath topic part id 100 query id QueryId cookie 1 2026-01-10T12:46:02.475388Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-10T12:46:02.475827Z node 62 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [62:22:2063] to [62:14:2056] query id QueryId 2026-01-10T12:46:02.475961Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1001: RowDispatcher: Received TEvStopSession from [62:14:2056] topic topic query id QueryId 2026-01-10T12:46:02.476036Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [62:14:2056] query id QueryId, partitions size 1 2026-01-10T12:46:02.476148Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [62:22:2063] 2026-01-10T12:46:02.756986Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [64:17:2058], tenant Tenant 2026-01-10T12:46:02.757080Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [64:17:2058] 2026-01-10T12:46:02.757114Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-10T12:46:02.757332Z node 64 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [64:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-10T12:46:02.757395Z node 64 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [64:19:2060] Successfully bootstrapped, local coordinator id [64:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-10T12:46:02.766264Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-10T12:46:02.766337Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-10T12:46:02.766372Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-10T12:46:02.766705Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [64:18:2059] 2026-01-10T12:46:02.767429Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-10T12:46:02.767599Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-10T12:46:02.767805Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 101 2026-01-10T12:46:02.768181Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-10T12:46:02.768582Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from 
[64:22:2063] to [64:14:2056] query id QueryId 2026-01-10T12:46:02.768658Z node 64 :FQ_ROW_DISPATCHER WARN: row_dispatcher.cpp:1140: RowDispatcher: Fatal session error, remove session [64:22:2063] 2026-01-10T12:46:02.768732Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:14:2056] query id QueryId, partitions size 2 2026-01-10T12:46:02.768957Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.769056Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-10T12:46:02.769148Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.769284Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-10T12:46:02.769416Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-10T12:46:02.769810Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from [64:22:2063] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.769884Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:15:2057] query id QueryId, partitions size 2 2026-01-10T12:46:02.769969Z node 64 :FQ_ROW_DISPATCHER ERROR: row_dispatcher.cpp:1033: RowDispatcher: Wrong readActorId [64:15:2057], no such consumer 2026-01-10T12:46:02.770020Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [64:22:2063] 2026-01-10T12:46:02.770244Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-10T12:46:02.770552Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-10T12:46:02.770622Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:14:2056] part id 100 query id QueryId 2026-01-10T12:46:02.770701Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-10T12:46:02.770804Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.770886Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 100 query id QueryId 2026-01-10T12:46:02.770968Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.771038Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-10T12:46:02.771099Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received 
TEvGetNextBatch from [64:14:2056] part id 101 query id QueryId 2026-01-10T12:46:02.771185Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-10T12:46:02.771265Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-10T12:46:02.771338Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-10T12:46:02.771402Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random |99.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/ut/unittest >> TS3FIFOCache::Random [GOOD] >> TS3FIFOCache::InsertUntouched [GOOD] >> TS3FIFOCache::EnsureLimits [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential |99.3%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TScreen::Sequential [GOOD] >> TScreen::Random |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.3%| 
[TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-10T12:43:37.040383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-10T12:43:37.382092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-10T12:43:37.417481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-10T12:43:37.418064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-10T12:43:37.418189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-10T12:43:38.287679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-10T12:43:38.287861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-10T12:43:38.306032Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1768049013248228 != 1768049013248231 2026-01-10T12:43:38.337338Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-10T12:43:38.395242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-10T12:43:38.584930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-10T12:43:38.996264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-10T12:43:39.014198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-10T12:43:39.982083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2748], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:39.982323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:39.982795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:883:2753], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:39.982869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-10T12:43:40.158382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-10T12:43:40.213597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvBoot 2026-01-10T12:43:40.214641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvRestored 2026-01-10T12:43:40.215036Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:912:2779] 2026-01-10T12:43:40.215305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-10T12:43:40.272574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-10T12:43:40.273301Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-10T12:43:40.273460Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-10T12:43:40.277039Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-10T12:43:40.277186Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-10T12:43:40.277246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-10T12:43:40.279339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-10T12:43:40.279501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-10T12:43:40.279599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2779] in generation 1 2026-01-10T12:43:40.280176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-10T12:43:40.324175Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-10T12:43:40.325784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-10T12:43:40.325937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:930:2789] 2026-01-10T12:43:40.325982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-10T12:43:40.326016Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-10T12:43:40.326058Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:43:40.326313Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:912:2779], Recipient [1:912:2779]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:40.328354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-10T12:43:40.330776Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-10T12:43:40.330898Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-10T12:43:40.330988Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-10T12:43:40.331034Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-10T12:43:40.331089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-10T12:43:40.331130Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-10T12:43:40.331180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-10T12:43:40.331213Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-10T12:43:40.331258Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-10T12:43:40.331428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:919:2783], Recipient [1:912:2779]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:40.331481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:43:40.331537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:911:2778], serverId# [1:919:2783], sessionId# [0:0:0] 2026-01-10T12:43:40.331606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:919:2783] 2026-01-10T12:43:40.331651Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-10T12:43:40.331763Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-10T12:43:40.332162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-10T12:43:40.335921Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-10T12:43:40.336070Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-10T12:43:40.336253Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:43:40.336302Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-10T12:43:40.336344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2026-01-10T12:43:40.336392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-10T12:43:40.336742Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:43:40.336794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-10T12:43:40.336827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-10T12:43:40.336866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-10T12:43:40.336942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-10T12:43:40.336978 ... 86224037888 read iterator# {[19:1126:2934], 0} exhausted 2026-01-10T12:46:07.567341Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:46:07.567374Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2026-01-10T12:46:07.567407Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:46:07.567438Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:46:07.567498Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-10T12:46:07.567527Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:46:07.567570Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-10T12:46:07.567650Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-10T12:46:07.567839Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-10T12:46:07.888003Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [19:1137:2945], Recipient [19:919:2782]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-10T12:46:07.888174Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-10T12:46:07.888286Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [19:1136:2944], serverId# [19:1137:2945], sessionId# [0:0:0] 2026-01-10T12:46:07.888746Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [19:1135:2943], Recipient [19:919:2782]: NKikimrDataEvents.TEvWrite TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } 
LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-10T12:46:07.888796Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-10T12:46:07.889017Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [19:919:2782], Recipient [19:919:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:46:07.889068Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-10T12:46:07.889206Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-10T12:46:07.889463Z node 19 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-10T12:46:07.889617Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:46:07.889757Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-10T12:46:07.889906Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckWrite 2026-01-10T12:46:07.890008Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:46:07.890083Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckWrite 2026-01-10T12:46:07.890149Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-10T12:46:07.890212Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-10T12:46:07.890284Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:46:07.890381Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-10T12:46:07.890441Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:46:07.890478Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-10T12:46:07.890513Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BlockFailPoint 2026-01-10T12:46:07.890545Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BlockFailPoint 2026-01-10T12:46:07.890574Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 
72075186224037888 is Executed 2026-01-10T12:46:07.890601Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BlockFailPoint 2026-01-10T12:46:07.890629Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteWrite 2026-01-10T12:46:07.890656Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteWrite 2026-01-10T12:46:07.890707Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-10T12:46:07.890798Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-10T12:46:07.890989Z node 19 :TX_DATASHARD TRACE: datashard_kqp.cpp:834: KqpCommitLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-10T12:46:07.891073Z node 19 :TX_DATASHARD TRACE: datashard_user_db.cpp:517: Committing changes lockId# 281474976715661 in localTid# 1001 shard# 72075186224037888 2026-01-10T12:46:07.891222Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-10T12:46:07.891295Z node 19 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715662 marked broken at v{min} 2026-01-10T12:46:07.891558Z node 19 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-10T12:46:07.891664Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-10T12:46:07.891716Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteWrite 2026-01-10T12:46:07.891789Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-10T12:46:07.891851Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:46:07.891954Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-10T12:46:07.892006Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-10T12:46:07.892108Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-10T12:46:07.892165Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-10T12:46:07.892226Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-10T12:46:07.892258Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-10T12:46:07.892308Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-10T12:46:07.893261Z node 19 :TX_DATASHARD TRACE: 
datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-10T12:46:07.893353Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-10T12:46:07.893441Z node 19 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 7 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-10T12:46:07.893571Z node 19 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-10T12:46:07.894483Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553218, Sender [19:1126:2934], Recipient [19:919:2782]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 2026-01-10T12:46:07.894697Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3660: 72075186224037888 ReadAck for read iterator# {[19:1126:2934], 0}: { ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 }, read continued, bytesLeft# 4294967295, rowsLeft# 1 2026-01-10T12:46:07.894833Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553217, Sender [19:919:2782], Recipient [19:919:2782]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-10T12:46:07.894967Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037888 ReadContinue for iterator# {[19:1126:2934], 0}, firstUnprocessedQuery# 0 2026-01-10T12:46:07.895099Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037888 ReadContinue: iterator# {[19:1126:2934], 0}, FirstUnprocessedQuery# 0 2026-01-10T12:46:07.895446Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3277: 72075186224037888 read iterator# {[19:1126:2934], 0} TTxReadContinue::Execute() found broken lock# 281474976715662 2026-01-10T12:46:07.895752Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3321: 72075186224037888 read iterator# {[19:1126:2934], 0} TTxReadContinue::Execute() finished with error, aborting: ReadId: 0 SeqNo: 2 Status { Code: ABORTED Issues { message: "Read conflict with concurrent transaction" severity: 1 } } BrokenTxLocks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 18446744073709551615 SchemeShard: 72057594046644480 PathId: 38 } NodeId: 19 |99.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |99.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::S3FIFO >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> 
BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> 
TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll >> Backup::GenerationDirs [GOOD] >> Backup::SnapshotIOError >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> Backup::SnapshotIOError [GOOD] >> Backup::EmptyData >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory >> Backup::EmptyData [GOOD] >> Backup::SnapshotData >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> Backup::SnapshotData [GOOD] >> Backup::SnapshotLargeData >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TExecutorDb::RandomOps >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBRowLocks::LockSurvivesCompactions [GOOD] >> DBRowLocks::LockOverCompactedErase >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> DBRowLocks::LockOverCompactedErase [GOOD] >> DBRowLocks::CommitTxAfterLockThenCompact [GOOD] >> DBRowLocks::CommitLockThenCompactRowVersion [GOOD] >> DBRowLocks::OverwriteLockThenCompact [GOOD] >> DBRowLocks::LockOpenTxAndTxDataAccounting [GOOD] >> DBRowLocks::MultipleCommittedRowLocks [GOOD] >> DBRowLocks::LocksCommittedRemovedIteration [GOOD] >> 
DBRowLocks::LocksReplay [GOOD] >> DBRowLocks::LocksMvccCompact [GOOD] >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Disabling |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBRowLocks::LocksMvccCompact [GOOD] Test command err: 10 parts: [0:0:1:0:0:0:0] 23928 rows, 1015 pages, 5 levels: (157741, 52588) (315424, 105149) (479998, 160007) (639733, 213252) (799132, 266385) [0:0:2:0:0:0:0] 24216 rows, 1025 pages, 5 levels: (158968, 52997) (323176, 107733) (478777, 159600) (638131, 212718) (798961, 266328) [0:0:3:0:0:0:0] 23857 rows, 1008 pages, 5 levels: (161719, 53914) (324091, 108038) (482023, 160682) (640741, 213588) (798478, 266167) [0:0:4:0:0:0:0] 24184 rows, 1023 pages, 5 levels: (160366, 53463) (321823, 107282) (478882, 159635) (641413, 213812) (799024, 266349) [0:0:5:0:0:0:0] 23945 rows, 1019 pages, 5 levels: (160006, 53343) (321943, 107322) (483100, 161041) (641107, 213710) (799117, 266380) [0:0:6:0:0:0:0] 23619 rows, 1005 pages, 5 levels: (161371, 53798) (319855, 106626) (480928, 160317) (636934, 212319) (799258, 266427) [0:0:7:0:0:0:0] 23988 rows, 1019 pages, 5 levels: (154531, 51518) (314071, 104698) (475438, 158487) (636523, 212182) (798766, 266263) [0:0:8:0:0:0:0] 23770 rows, 1013 pages, 5 levels: (160948, 53657) (318202, 106075) (477640, 159221) (640657, 213560) (799090, 266371) [0:0:9:0:0:0:0] 24256 rows, 1029 pages, 5 levels: (157747, 52590) (320038, 106687) (482770, 160931) (638905, 212976) (799195, 266406) [0:0:10:0:0:0:0] 24237 rows, 1026 pages, 5 levels: (162409, 54144) (317659, 105894) (477673, 159232) (637528, 212517) (798748, 266257) Checking BTree: Touched 1% bytes, 37 pages RowCountHistogram: 9% (actual 6%) key = (54346, 18123) value = 23288 (actual 16627 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 47414 (actual 46964 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 71513 (actual 62823 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 95735 (actual 93347 - 0% error) 10% (actual 11%) key = (400444, 133489) value = 119910 (actual 120422 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 144061 (actual 138588 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 168190 (actual 169409 - 0% error) 10% (actual 5%) key = (611236, 203753) value = 192378 (actual 183755 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 215613 (actual 212448 - 1% error) 10% (actual 11%) DataSizeHistogram: 9% (actual 6%) key = (54346, 18123) value = 1986792 (actual 1422570 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 4036041 (actual 4004054 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 6085370 (actual 5348583 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 8135866 (actual 7931163 - 1% error) 10% (actual 11%) key = (400444, 133489) value = 10187497 (actual 10227908 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 12238449 (actual 11773611 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 14287401 (actual 14387497 - 0% error) 10% (actual 6%) key = 
(611236, 203753) value = 16340389 (actual 15610901 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 18309634 (actual 18041898 - 1% error) 10% (actual 11%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79777, 26600) value = 24001 (actual 24257 - 0% error) 10% (actual 10%) key = (159688, 53237) value = 48010 (actual 48277 - 0% error) 10% (actual 10%) key = (239839, 79954) value = 72013 (actual 72278 - 0% error) 10% (actual 9%) key = (319807, 106610) value = 96022 (actual 96277 - 0% error) 10% (actual 10%) key = (399964, 133329) value = 120041 (actual 120304 - 0% error) 10% (actual 10%) key = (479791, 159938) value = 144061 (actual 144321 - 0% error) 10% (actual 10%) key = (559867, 186630) value = 168077 (actual 168330 - 0% error) 10% (actual 10%) key = (639661, 213228) value = 192085 (actual 192333 - 0% error) 10% (actual 10%) key = (719458, 239827) value = 216091 (actual 216348 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79318, 26447) value = 2038035 (actual 2060169 - 0% error) 10% (actual 10%) key = (159028, 53017) value = 4076033 (actual 4098046 - 0% error) 10% (actual 10%) key = (239581, 79868) value = 6115440 (actual 6137485 - 0% error) 10% (actual 10%) key = (319516, 106513) value = 8153742 (actual 8175567 - 0% error) 10% (actual 10%) key = (399841, 133288) value = 10191957 (actual 10213746 - 0% error) 10% (actual 10%) key = (479734, 159919) value = 12230556 (actual 12252749 - 0% error) 10% (actual 10%) key = (559552, 186525) value = 14269383 (actual 14291350 - 0% error) 10% (actual 10%) key = (639193, 213072) value = 16307737 (actual 16329710 - 0% error) 10% (actual 10%) key = (719326, 239783) value = 18346896 (actual 18369051 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 43 pages RowCountHistogram: 10% (actual 6%) key = (50749, 16924) value = 24065 (actual 15550 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 48098 (actual 44756 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 72300 (actual 61833 - 4% error) 10% (actual 12%) key = (301159, 100394) value = 96516 (actual 90698 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 120685 (actual 119332 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 144842 (actual 136562 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 168942 (actual 165043 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 193089 (actual 183462 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 218665 (actual 210950 - 3% error) 8% (actual 12%) DataSizeHistogram: 10% (actual 6%) key = (50749, 16924) value = 2051869 (actual 1330161 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 4100433 (actual 3812568 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 6148888 (actual 5264750 - 4% error) 10% (actual 11%) key = (301159, 100394) value = 8200933 (actual 7706870 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 10251926 (actual 10135710 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 12302580 (actual 11601475 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 14351377 (actual 14019410 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 16401437 (actual 15584938 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 18568091 (actual 17915901 - 3% error) 8% (actual 12%) 10 parts: [0:0:1:0:0:0:0] 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) [0:0:2:0:0:0:0] 24000 rows, 1022 pages, 5 levels: (95941, 
31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) [0:0:3:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) [0:0:4:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) [0:0:5:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) [0:0:6:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) [0:0:7:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) [0:0:8:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) [0:0:9:0:0:0:0] 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) [0:0:10:0:0:0:0] 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 
10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 
0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 
(actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), 
[14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> TSharedPageCache_Actor::Attach_Basics |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics >> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> 
TSharedPageCache_Actor::InMemory_MoveEvictedToRegular >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::InMemory_AttachRepeated >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> TSharedPageCache_Actor::InMemory_AttachRepeated [GOOD] >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive >> TSharedPageCache_Actor::Evict_Passive [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache_Actor::Evict_Passive [GOOD] Test command err: 0.29162 00000.001 II| FAKE_ENV: Born at 2026-01-10T12:46:09.947867Z 00000.040 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.046 II| FAKE_ENV: Starting storage for BS group 0 00000.048 II| FAKE_ENV: Starting storage for BS group 1 00000.048 II| FAKE_ENV: Starting storage for BS group 2 00000.048 II| FAKE_ENV: Starting storage for BS group 3 00000.071 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.075 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.089 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} queued, type NKikimr::NSharedCache::TTxInitSchema 00000.089 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.092 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 500b annex 0, ~{ } -{ }, 0 gb} 00000.092 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.092 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.092 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.092 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.093 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.096 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 00000.096 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.096 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.096 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.099 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} 
release 4194304b of static, Memory{0 dyn 0} 00000.099 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.106 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.106 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.106 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.107 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.107 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.107 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.107 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.110 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.110 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} took 
4194304b of static mem, Memory{4194304 dyn 0} 00000.112 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.112 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.112 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.113 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.113 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.114 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.114 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.114 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.115 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.115 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.115 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.115 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.116 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.116 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.116 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.116 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.117 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.118 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.120 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.120 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.120 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.121 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.121 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.121 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.121 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, 
NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.122 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.122 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.122 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.123 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.123 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.123 DD| TABLET_EXECUTOR: Leader{1:2:16} commited cookie 1 for step 15 00000.124 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.124 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.125 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{15, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.125 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.126 DD| TABLET_EXECUTOR: Leader{1:2:17} commited cookie 1 for step 16 00000.127 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.127 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.127 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{16, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.127 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.129 DD| TABLET_EXECUTOR: Leader{1:2:18} commited cookie 1 for step 17 00000.130 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.130 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.130 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{17, r ... :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #5 (done) Checking results#5 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.536417Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 6 class Online from cache [ ] already requested [ ] to request [ 14 ] 2026-01-10T12:46:24.536467Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 13 ] owner [33:5:2052] 2026-01-10T12:46:24.536508Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... 
waiting for fetches #6 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #6 (done) Checking fetches#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] ... waiting for results #6 2026-01-10T12:46:24.536638Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 14 ] 2026-01-10T12:46:24.536667Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 14 ] cookie 6 2026-01-10T12:46:24.536703Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #6 (done) Checking results#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.536798Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 7 class Online from cache [ ] already requested [ ] to request [ 15 ] 2026-01-10T12:46:24.536870Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 14 ] owner [33:5:2052] 2026-01-10T12:46:24.536908Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #7 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #7 (done) Checking fetches#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] ... waiting for results #7 2026-01-10T12:46:24.537065Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 15 ] 2026-01-10T12:46:24.537094Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 15 ] cookie 7 2026-01-10T12:46:24.537131Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #7 (done) Checking results#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.537266Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 8 class Online from cache [ ] already requested [ ] to request [ 16 ] 2026-01-10T12:46:24.537328Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 15 ] owner [33:5:2052] 2026-01-10T12:46:24.537367Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... 
waiting for fetches #8 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #8 (done) Checking fetches#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] ... waiting for results #8 2026-01-10T12:46:24.537518Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 16 ] 2026-01-10T12:46:24.537547Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 16 ] cookie 8 2026-01-10T12:46:24.537585Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #8 (done) Checking results#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.537716Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 9 class Online from cache [ ] already requested [ ] to request [ 17 ] 2026-01-10T12:46:24.537769Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 16 ] owner [33:5:2052] 2026-01-10T12:46:24.537803Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #9 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #9 (done) Checking fetches#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] ... waiting for results #9 2026-01-10T12:46:24.537932Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 17 ] 2026-01-10T12:46:24.537971Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 17 ] cookie 9 2026-01-10T12:46:24.538012Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #9 (done) Checking results#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.538142Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 10 class Online from cache [ ] already requested [ ] to request [ 18 ] 2026-01-10T12:46:24.538194Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 17 ] owner [33:5:2052] 2026-01-10T12:46:24.538229Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... 
waiting for fetches #10 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #10 (done) Checking fetches#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for results #10 2026-01-10T12:46:24.538354Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 18 ] 2026-01-10T12:46:24.538381Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 18 ] cookie 10 2026-01-10T12:46:24.538413Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #10 (done) Checking results#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:46:24.538511Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 11 class Online from cache [ ] already requested [ ] to request [ 19 ] 2026-01-10T12:46:24.538560Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 18 ] owner [33:5:2052] 2026-01-10T12:46:24.538593Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #11 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #11 (done) Checking fetches#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] ... waiting for results #11 2026-01-10T12:46:24.538722Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 19 ] 2026-01-10T12:46:24.538767Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 19 ] cookie 11 2026-01-10T12:46:24.538813Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #11 (done) Checking results#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] ... waiting for NActors::TEvents::TEvWakeup 2026-01-10T12:46:24.538942Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-10T12:46:24.539004Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) ... 
waiting for NActors::TEvents::TEvWakeup 2026-01-10T12:46:24.539132Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-10T12:46:24.539221Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 1 2 3 ] owner [33:5:2052] 2026-01-10T12:46:24.539302Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::WreckPart |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/dstool/py3test >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/dstool/py3test >> DBase::Select [GOOD] >> DBase::Defaults [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey |99.4%| [TM] {RESULT} ydb/tests/functional/dstool/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TPart::WreckPart [GOOD] >> TPart::WreckPartColumnGroups >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> TDqPqRdReadActorTests::Backpressure [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> 
DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPartBtreeIndexIteration::NoNodes >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> TExecutorDb::RandomOps [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] |99.4%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> Memtable::Wreck [GOOD] >> Memtable::Erased >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TDqPqRdReadActorTests::TestReadFromTopicFirstWatermark [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NOther::Blocks [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 
RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 
Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 
rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 
DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + 
Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} 
| ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} 
Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 [GOOD] >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> TDqPqRdReadActorTests::TestWatermarksWhere >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TIterator::Single [GOOD] >> TIterator::SingleReverse |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TDqPqRdReadActorTests::TestWatermarksWhere [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> TDqPqRdReadActorTests::TestWatermarksWhereFalse [GOOD] >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer 
[GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset [GOOD] >> TDqPqReadActorTest::TestReadFromTopic |99.4%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] |99.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TDqPqReadActorTest::TestReadFromTopicFromNow >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> 
TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> TDqPqReadActorTest::ReadWithFreeSpace >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv >> TDqPqReadActorTest::ReadNonExistentTopic >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> 
TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TDqPqReadActorTest::TestSaveLoadPqRead >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TPart::ForwardEnv [GOOD] >> TPart::ForwardEnvColumnGroups >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPart::CutKeys_CutUtf8String [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:17.403405Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.012 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.012 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.012 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{4, redo 1041b 
alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{5, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{6, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{7, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{8, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: 
Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{9, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{10, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{11, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{12, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{13, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.020 DD| TABLET_EXECUTOR: 
Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{14, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memor ... {[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{11} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 10 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{3} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 
105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{3} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{3} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{3} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 280b 2r} data 904b + FlatIndex{3} Label{3 rev 3, 401b} 3 rec | Page Row Bytes (String) | 0 0 140b {____________________________________________________________________________________________________cccddd} | 1 1 140b 
{____________________________________________________________________________________________________cd} | 1 1 140b {____________________________________________________________________________________________________cddddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 280 ErasedRowCount: 0} Label{13 rev 1, 204b} | PageId: 0 RowCount: 1 DataSize: 140 ErasedRowCount: 0 | > {____________________________________________________________________________________________________cd} | PageId: 1 RowCount: 2 DataSize: 280 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x89\x91} | 1 1 49b {abc\xF0\x9F\x89\x91\xF0\x9F\x89\x91\xF0\x9F\x89\x91} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x89\x91} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 342b + FlatIndex{3} Label{3 rev 3, 120b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 46b {abc\xE2\x9A\xAB} | 1 1 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xE2\x9A\xAB} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 344b + FlatIndex{3} Label{3 rev 3, 121b} 3 rec | Page Row Bytes (Utf8) | 0 0 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 46 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 332b + FlatIndex{3} Label{3 rev 3, 115b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {abcxxx} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 326b + FlatIndex{3} Label{3 rev 3, 112b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 40b {abcx} | 1 1 40b {abcxxx} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abcx} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x96} | 1 1 49b {abc\xF0\x9F\x98\x96\xF0\x9F\x98\x96\xF0\x9F\x98\x96} + BTreeIndex{PageId: 2 RowCount: 2 
DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x96} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, 
PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 
Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{10} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 
Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{21} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 20 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} 
| PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERo ... owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 
31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{29} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | 
PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} 
+ Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/sql/py3test |99.4%| [TM] {RESULT} ydb/tests/sql/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> 
test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> Backup::SnapshotLargeData [GOOD] >> Backup::SnapshotSchema [GOOD] >> Backup::ChangelogData >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> Backup::ChangelogData [GOOD] >> Backup::ChangelogLargeData >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> test_public_api.py::TestExplain::test_explain_data_query |99.4%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [FAIL] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.828677Z 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.013 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.139 II| TABLET_EXECUTOR: Leader{1:2:269} starting compaction 00000.139 II| TABLET_EXECUTOR: Leader{1:2:270} starting Scan{1 on 3, Compact{1.2.269, eph 1}} 00000.139 II| TABLET_EXECUTOR: Leader{1:2:270} started compaction 1 00000.139 II| TABLET_OPS_HOST: Scan{1 on 3, Compact{1.2.269, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.143 II| TABLET_OPS_HOST: Scan{1 on 3, Compact{1.2.269, eph 1}} end=Done, 110r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 12 of 20 ~1p 00000.143 II| OPS_COMPACT: Compact{1.2.269, eph 1} end=Done, 13 blobs 77r (max 110), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 12 +9, (150405 6931 178697)b }, ecr=1.000 
00000.162 II| TABLET_EXECUTOR: Leader{1:2:270} Compact 1 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 269, product {1 parts epoch 2} done 00000.184 II| TABLET_EXECUTOR: Leader{1:2:294} starting compaction 00000.185 II| TABLET_EXECUTOR: Leader{1:2:295} starting Scan{3 on 2, Compact{1.2.294, eph 1}} 00000.185 II| TABLET_EXECUTOR: Leader{1:2:295} started compaction 3 00000.185 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.294, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.187 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.294, eph 1}} end=Done, 110r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 16 of 19 ~1p 00000.188 II| OPS_COMPACT: Compact{1.2.294, eph 1} end=Done, 9 blobs 82r (max 110), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 16 +5, (165934 27089 192663)b }, ecr=1.000 00000.190 II| TABLET_EXECUTOR: Leader{1:2:296} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 294, product {1 parts epoch 2} done 00000.204 II| TABLET_EXECUTOR: LSnap{1:2, on 2:301, 6122b, wait} done, Waste{2:0, 967011b +(154, 115604b), 300 trc} 00000.419 II| TABLET_EXECUTOR: Leader{1:2:592} starting compaction 00000.420 II| TABLET_EXECUTOR: Leader{1:2:593} starting Scan{5 on 3, Compact{1.2.592, eph 2}} 00000.420 II| TABLET_EXECUTOR: Leader{1:2:593} started compaction 5 00000.420 II| TABLET_OPS_HOST: Scan{5 on 3, Compact{1.2.592, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.422 II| TABLET_OPS_HOST: Scan{5 on 3, Compact{1.2.592, eph 2}} end=Done, 100r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 17 of 23 ~1p 00000.423 II| OPS_COMPACT: Compact{1.2.592, eph 2} end=Done, 12 blobs 100r (max 100), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 17 +8, (109439 13641 217702)b }, ecr=1.000 00000.425 II| TABLET_EXECUTOR: Leader{1:2:594} Compact 5 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 592, product {1 parts epoch 3} done 00000.426 II| TABLET_EXECUTOR: Leader{1:2:595} starting compaction 00000.426 II| TABLET_EXECUTOR: Leader{1:2:596} starting Scan{7 on 3, Compact{1.2.595, eph 2}} 00000.426 II| TABLET_EXECUTOR: Leader{1:2:596} started compaction 7 00000.426 II| TABLET_OPS_HOST: Scan{7 on 3, Compact{1.2.595, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.432 II| TABLET_OPS_HOST: Scan{7 on 3, Compact{1.2.595, eph 2}} end=Done, 153r seen, TFwd{fetch=252KiB,saved=252KiB,usage=252KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 35 of 46 ~3p 00000.433 II| OPS_COMPACT: Compact{1.2.595, eph 2} end=Done, 4 blobs 125r (max 177), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 35 +0, (220704 20572 304122)b }, ecr=1.000 00000.452 II| TABLET_EXECUTOR: Leader{1:2:596} Compact 7 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 595, product {1 parts epoch 0} done 00000.470 II| TABLET_EXECUTOR: LSnap{1:2, on 2:601, 7549b, wait} done, Waste{2:0, 1565792b +(90, 416400b), 300 trc} 00000.479 II| TABLET_EXECUTOR: Leader{1:2:618} starting compaction 00000.479 II| TABLET_EXECUTOR: Leader{1:2:619} starting Scan{9 on 2, Compact{1.2.618, eph 2}} 00000.479 II| TABLET_EXECUTOR: Leader{1:2:619} started compaction 9 00000.479 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.618, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.482 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.618, eph 2}} end=Done, 106r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 13 of 15 ~1p 00000.482 II| OPS_COMPACT: Compact{1.2.618, eph 2} end=Done, 9 blobs 106r (max 106), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 13 +5, (136170 27277 159597)b }, ecr=1.000 00000.498 II| TABLET_EXECUTOR: Leader{1:2:620} Compact 9 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 618, product {1 parts epoch 3} done 00000.499 II| TABLET_EXECUTOR: Leader{1:2:621} starting compaction 00000.499 II| TABLET_EXECUTOR: Leader{1:2:622} starting Scan{11 on 2, Compact{1.2.621, eph 2}} 00000.499 II| TABLET_EXECUTOR: Leader{1:2:622} started compaction 11 00000.500 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.621, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.505 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.621, eph 2}} end=Done, 164r seen, TFwd{fetch=274KiB,saved=274KiB,usage=274KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 34 of 39 ~3p 00000.506 II| OPS_COMPACT: Compact{1.2.621, eph 2} end=Done, 4 blobs 135r (max 188), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 34 +0, (254504 34004 301698)b }, ecr=1.000 00000.508 II| TABLET_EXECUTOR: Leader{1:2:625} Compact 11 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 621, product {1 parts epoch 0} done 00000.714 II| TABLET_EXECUTOR: LSnap{1:2, on 2:901, 9362b, wait} done, Waste{2:0, 2148095b +(93, 385613b), 300 trc} 00000.735 II| TABLET_EXECUTOR: Leader{1:2:922} starting compaction 00000.735 II| TABLET_EXECUTOR: Leader{1:2:923} starting Scan{13 on 3, Compact{1.2.922, eph 3}} 00000.735 II| TABLET_EXECUTOR: Leader{1:2:923} started compaction 13 00000.735 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.922, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.738 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.922, eph 3}} end=Done, 110r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 10 of 13 ~1p 00000.738 II| OPS_COMPACT: Compact{1.2.922, eph 3} end=Done, 10 blobs 110r (max 110), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 10 +6, (152372 46951 145438)b }, ecr=1.000 00000.741 II| TABLET_EXECUTOR: Leader{1:2:923} Compact 13 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 922, product {1 parts epoch 4} done 00000.814 II| TABLET_EXECUTOR: Leader{1:2:992} starting compaction 00000.815 II| TABLET_EXECUTOR: Leader{1:2:993} starting Scan{15 on 2, Compact{1.2.992, eph 3}} 00000.815 II| TABLET_EXECUTOR: Leader{1:2:993} started compaction 15 00000.815 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.992, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.817 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.992, eph 3}} end=Done, 108r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 11 of 18 ~1p 00000.818 II| OPS_COMPACT: Compact{1.2.992, eph 3} end=Done, 16 blobs 108r (max 108), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 11 +12, (170916 27498 193175)b }, ecr=1.000 00000.819 II| TABLET_EXECUTOR: Leader{1:2:994} Compact 15 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 992, product {1 parts epoch 4} done 00001.019 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1201, 11448b, wait} done, Waste{2:0, 3052986b +(152, 105646b), 300 trc} 00001.040 II| TABLET_EXECUTOR: Leader{1:2:1227} starting compaction 00001.040 II| TABLET_EXECUTOR: Leader{1:2:1228} starting Scan{17 on 3, Compact{1.2.1227, eph 4}} 00001.040 II| TABLET_EXECUTOR: Leader{1:2:1228} 
started compaction 17 00001.040 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1227, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00001.043 II| TABLET_OPS_HOST: Scan{17 on 3, Compact{1.2.1227, eph 4}} end=Done, 103r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 6 of 8 ~1p 00001.043 II| OPS_COMPACT: Compact{1.2.1227, eph 4} end=Done, 11 blobs 103r (max 103), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 6 +7, (152780 6913 108780)b }, ecr=1.000 00001.045 II| TABLET_EXECUTOR: Leader{1:2:1229} Compact 17 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1227, product {1 parts epoch 5} done 00001.046 II| TABLET_EXECUTOR: Leader{1:2:1230} starting compaction 00001.046 II| TABLET_EXECUTOR: Leader{1:2:1231} starting Scan{19 on 3, Compact{1.2.1230, eph 4}} 00001.046 II| TABLET_EXECUTOR: Leader{1:2:1231} started compaction 19 00001.047 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1230, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00001.052 II| TABLET_OPS_HOST: Scan{19 on 3, Compact{1.2.1230, eph 4}} end=Done, 172r seen, TFwd{fetch=297KiB,saved=297KiB,usage=291KiB,after=6.38KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 23 of 29 ~3p 00001.053 II| OPS_COMPACT: Compact{1.2.1230, eph 4} end=Done, 4 blobs 172r (max 213), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 23 +0, (261329 47333 199742)b }, ecr=1.000 00001.069 II| TABLET_EXECUTOR: Leader{1:2:1233} Compact 19 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 1230, product {1 parts epoch 0} done 00001.162 II| TABLET_EXECUTOR: Leader{1:2:1320} starting compaction 00001.162 II| TABLET_EXECUTOR: Leader{1:2:1321} starting Scan{21 on 2, Compact{1.2.1320, eph 4}} 00001.162 II| TABLET_EXECUTOR: Leader{1:2:1321} started compaction 21 00001.162 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1320, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00001.165 II| TABLET_OPS_HOST: Scan{21 on 2, Compact{1.2.1320, eph 4}} end=Done, 105r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 9 of 13 ~1p 00001.165 II| OPS_COMPACT: Compact{1.2.1320, eph 4} end=Done, 9 blobs 105r (max 105), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 9 +5, (170716 20590 127357)b }, ecr=1.000 00001.176 II| TABLET_EXECUTOR: Leader{1:2:1322} Compact 21 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1320, product {1 parts epoch 5} done 00001.178 II| TABLET_EXECUTOR: Leader{1:2:1323} starting compaction 00001.178 II| TABLET_EXECUTOR: Leader{1:2:1324} starting Scan{23 on 2, Compact{1.2.1323, eph 4}} 00001.178 II| TABLET_EXECUTOR: Leader{1:2:1324} started compaction 23 00001.178 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1323, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00001.185 II| TABLET_OPS_HOST: Scan{23 on 2, Compact{1.2.1323, eph 4}} end=Done, 169r seen, TFwd{fetch=326KiB,saved=326KiB,usage=313KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 30 of 37 ~3p 00001.185 II| OPS_COMPACT: Compact{1.2.1323, eph 4} end=Done, 4 blobs 169r (max 213), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 30 +0, (253186 27458 267141)b }, ecr=1.000 00001.187 II| TABLET_EXECUTOR: Leader{1:2:1326} Compact 23 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 1323, product {1 parts epoch 0} done 00001.379 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1501, 12990b, wait} done, Waste{2:0, 3715247b +( ... 
DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [59:30:2062]) to queue queue_compaction_gen0 00000.014 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.014 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.014 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.014 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 5 00000.016 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {tx status + 1 parts epoch 2} done 00000.016 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.016 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.016 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.017 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [59:30:2062]) (release resources {1, 0}) 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting until compacted 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 1 ...compacting 00000.018 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Mem 00000.018 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (2 by [59:30:2062]) priority=5 resources={1, 0} 00000.018 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [59:30:2062]) from queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy 
PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.018 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.018 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.018 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.019 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 7, product {0 parts epoch 2} done 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.019 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [59:30:2062]) (release resources {1, 0}) 00000.019 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting until compacted 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 0 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.022 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 478b +(3, 191b), 9 trc, -191b acc} 00000.025 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.026 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 258 bytes, 258 total, blobs: { [1:2:2:1:8192:84:0], [1:2:6:1:32768:124:0], [1:2:8:1:32768:50:0] } 00000.026 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 384 bytes, 384 total, blobs: { [1:2:5:1:12288:158:0], [1:2:3:1:24576:78:0], [1:2:4:1:24576:65:0], [1:2:9:1:24576:83:0] } 00000.027 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.027 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.027 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.027 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.027 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 239b, wait} done, Waste{2:0, 478b +(3, 191b), 9 trc} 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ... 
checking rows 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} pin 0 (0 b) load 1 (55 b) 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} postponed, loading 1 pages, 55 bytes, newly pinned 0 pages, 0 bytes 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:158:0] ok OK}, type 1 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} activated 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.030 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.030 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 478b +(0, 0b), 1 trc, -191b acc} 00000.030 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 91b} miss {0 0b} in-memory miss {0 0b} 00000.030 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.030 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {798b, 12} 00000.030 II| FAKE_ENV: DS.1 gone, left {717b, 5}, put {1117b, 11} 00000.031 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: All BS storage groups are stopped 00000.031 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.031 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 126}, stopped |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> 
test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> Backup::ChangelogLargeData [GOOD] >> Backup::ChangelogManyCommits >> Backup::ChangelogManyCommits [GOOD] >> Backup::ChangelogSchema >> Backup::ChangelogSchema [GOOD] >> Backup::ChangelogSchemaAndData |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> Backup::ChangelogSchemaAndData [GOOD] >> Backup::ChangelogSchemaNewColumn >> Backup::ChangelogSchemaNewColumn [GOOD] >> Backup::ChangelogTornWrite [GOOD] >> Backup::ExcludeTablet >> Backup::ExcludeTablet [GOOD] >> Backup::RecoveryModeKeepsData [GOOD] >> Backup::NoBackupTable >> Backup::NoBackupTable [GOOD] >> Backup::NoBackupColumn >> Backup::NoBackupColumn [GOOD] >> Backup::NewSnapshotChangelogSize >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> Backup::NewSnapshotChangelogSize [GOOD] >> Bloom::Conf [GOOD] >> Bloom::Hashes [GOOD] >> Bloom::Rater >> TPqWriterTest::TestWriteToTopic >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TPqWriterTest::TestWriteToTopic [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> TPqWriterTest::TestDeferredWriteToTopic >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> 
BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> TPqWriterTest::WriteNonExistentTopic >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> test_vdisks.py::TestTinyVDisks::test_disabled >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> TPqWriterTest::TestCheckpoints [GOOD] >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> TPqWriterTest::TestCheckpointWithEmptyBatch >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2026-01-10T12:44:13.109395Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7593712860757861134:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-10T12:44:13.154428Z node 1 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. 
SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-10T12:44:13.154743Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2026-01-10T12:44:13.154782Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-10T12:44:13.154802Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7593712860757861134:2054]) 2026-01-10T12:44:13.154845Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7593712860757861140:2048] 2026-01-10T12:44:13.155658Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7593712860757861135:2055] 2026-01-10T12:44:13.155720Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7593712860757861135:2055], partIds: 666 cookie 1 2026-01-10T12:44:13.156508Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7593712860757861135:2055], cookie 1 2026-01-10T12:44:13.156538Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-10T12:44:13.156553Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-10T12:44:13.156599Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7593712860757861137:2057], generation 1 2026-01-10T12:44:13.156653Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7593712860757861137:2057], connection id 1 partitions offsets (666 / ), 2026-01-10T12:44:13.157332Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7593712860757861137:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-10T12:44:13.157588Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7593712860757861137:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-10T12:44:13.164072Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvMessageBatch from [1:7593712860757861137:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-10T12:44:13.165477Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2026-01-10T12:44:13.165495Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-10T12:44:13.166017Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2026-01-10T12:44:13.166214Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-10T12:44:13.166232Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-10T12:44:13.167427Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:744: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2026-01-10T12:44:13.167541Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1242: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7593712860757861137:2057] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2026-01-10T12:44:13.167558Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:736: SelfId: [1:7593712860757861140:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7593712860757861137:2057] generation 1 2026-01-10T12:44:13.612804Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7593712864448875020:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-10T12:44:13.613271Z node 2 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-10T12:44:13.613407Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2026-01-10T12:44:13.613463Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-10T12:44:13.613475Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorChangesSubscribe to local RD ([2:7593712864448875020:2054]) 2026-01-10T12:44:13.613512Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [2:7593712864448875026:2048] 2026-01-10T12:44:13.613759Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [2:7593712864448875021:2055] 2026-01-10T12:44:13.613815Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [2:7593712864448875021:2055], partIds: 666 cookie 1 2026-01-10T12:44:13.614078Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [2:7593712864448875021:2055], cookie 1 2026-01-10T12:44:13.614092Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-10T12:44:13.614098Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-10T12:44:13.614117Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [2:7593712864448875023:2057], generation 1 2026-01-10T12:44:13.614146Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [2:7593712864448875023:2057], connection id 1 partitions offsets (666 / ), 2026-01-10T12:44:13.614365Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [2:7593712864448875023:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-10T12:44:13.614459Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7593712864448875023:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-10T12:44:13.615097Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7593712864448875023:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-10T12:44:13.615122Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2026-01-10T12:44:13.615128Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-10T12:44:13.615244Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
GetAsyncInputData freeSpace = 1000 2026-01-10T12:44:13.615318Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-10T12:44:13.615333Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-10T12:44:13.616418Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1097: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7593712864448875023:2057], reason Disconnected, cookie 999 2026-01-10T12:44:13.616490Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7593712864448875023:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-10T12:44:13.616599Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:963: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2026-01-10T12:44:13.617215Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7593712864448875026:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7593712864448875023:2057 ... d session. Close timeout: 0.000000s 2026-01-10T12:48:01.670092Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-10T12:48:01.670138Z :INFO: [local] [local] [7efa3c68-588a1871-18bd11a-27d8c0da] Counters: { Errors: 0 CurrentSessionLifetimeMs: 73 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:48:01.670255Z :NOTICE: [local] [local] [7efa3c68-588a1871-18bd11a-27d8c0da] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:48:01.673768Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb|aad1a0b6-d434256f-5c674e37-99a53251_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-10T12:48:01.673804Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb|aad1a0b6-d434256f-5c674e37-99a53251_0] PartitionId [0] Generation [1] Write session will now close 2026-01-10T12:48:01.673848Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb|aad1a0b6-d434256f-5c674e37-99a53251_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-10T12:48:01.674240Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb|aad1a0b6-d434256f-5c674e37-99a53251_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-10T12:48:01.674305Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb|aad1a0b6-d434256f-5c674e37-99a53251_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-10T12:48:02.094447Z node 46 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:268: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Load state: { SourceId: "d64be4e7-cc83ff77-9aed48b1-8dfc11cb" ConfirmedSeqNo: 3 EgressBytes: 3 } 2026-01-10T12:48:02.094604Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 2. Checkpoint: 0. Finished: 0 2026-01-10T12:48:02.107196Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 4 2026-01-10T12:48:02.107238Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 5 2026-01-10T12:48:02.112834Z :INFO: [local] OnFederationDiscovery fall back to single mode, database=local [] [] Start federated write session to database '' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "" DbInfos: [ { path: "local" endpoint: "localhost:18284" status: AVAILABLE weight: 100 } ] ControlPlaneEndpoint: localhost:18284 }2026-01-10T12:48:02.113235Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: try to update token 2026-01-10T12:48:02.113651Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Start write session. 
Will connect to nodeId: 0 2026-01-10T12:48:02.115259Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] Starting read session 2026-01-10T12:48:02.115320Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] Starting single session 2026-01-10T12:48:02.116023Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:48:02.116113Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:48:02.116180Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Reconnecting session to cluster in 0.000000s 2026-01-10T12:48:02.123038Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: write to message_group: d64be4e7-cc83ff77-9aed48b1-8dfc11cb 2026-01-10T12:48:02.123176Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: send init request: init_request { path: "Checkpoints" producer_id: "d64be4e7-cc83ff77-9aed48b1-8dfc11cb" message_group_id: "d64be4e7-cc83ff77-9aed48b1-8dfc11cb" } 2026-01-10T12:48:02.123215Z :TRACE: [local] TRACE_EVENT InitRequest 2026-01-10T12:48:02.123297Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Successfully connected. Initializing session 2026-01-10T12:48:02.123570Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: OnWriteDone gRpcStatusCode: 0 2026-01-10T12:48:02.125303Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Got InitResponse. ReadSessionId: test_client_1_22_2687331002809675442_v1 2026-01-10T12:48:02.125364Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-10T12:48:02.125557Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-10T12:48:02.128467Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2026-01-10T12:48:02.131074Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Got ReadResponse, serverBytesSize = 1089, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427711 2026-01-10T12:48:02.131201Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427711 2026-01-10T12:48:02.131449Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2026-01-10T12:48:02.131538Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Returning serverBytesSize = 1089 to budget 2026-01-10T12:48:02.131585Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] In ContinueReadingDataImpl, ReadSizeBudget = 1089, ReadSizeServerDelta = 52427711 2026-01-10T12:48:02.131838Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-10T12:48:02.131916Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-10T12:48:02.131960Z :DEBUG: [local] Take Data. 
Partition 0. Read: {1, 0} (1-1) 2026-01-10T12:48:02.131982Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-10T12:48:02.132006Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (3-3) 2026-01-10T12:48:02.132050Z :DEBUG: [local] Take Data. Partition 0. Read: {4, 0} (4-4) 2026-01-10T12:48:02.132226Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2026-01-10T12:48:02.132244Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] Closing read session. Close timeout: 0.000000s 2026-01-10T12:48:02.132283Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Returning serverBytesSize = 0 to budget 2026-01-10T12:48:02.132348Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-10T12:48:02.132417Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] Counters: { Errors: 0 CurrentSessionLifetimeMs: 17 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:48:02.132587Z :NOTICE: [local] [local] [86e2194e-138de548-b18136be-45586c58] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-10T12:48:02.132644Z :DEBUG: [local] [local] [86e2194e-138de548-b18136be-45586c58] [] Abort session to cluster 2026-01-10T12:48:02.133249Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] Closing read session. Close timeout: 0.000000s 2026-01-10T12:48:02.133306Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-10T12:48:02.133380Z :INFO: [local] [local] [86e2194e-138de548-b18136be-45586c58] Counters: { Errors: 0 CurrentSessionLifetimeMs: 18 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:48:02.133508Z :NOTICE: [local] [local] [86e2194e-138de548-b18136be-45586c58] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-10T12:48:02.137276Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: close. Timeout 0.000000s 2026-01-10T12:48:02.137323Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session will now close 2026-01-10T12:48:02.137380Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: aborting 2026-01-10T12:48:02.137892Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: gracefully shut down, all writes complete 2026-01-10T12:48:02.138377Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-10T12:48:02.138467Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1768049282138 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-10T12:48:02.138522Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-10T12:48:02.138564Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session is aborting and will not restart 2026-01-10T12:48:02.138633Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [d64be4e7-cc83ff77-9aed48b1-8dfc11cb] Write session: destroy 2026-01-10T12:48:02.743906Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2026-01-10T12:48:02.757047Z node 47 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:240: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2026-01-10T12:48:02.757250Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:418: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "60c0e650-e831680-a2fbda05-7c05d22a" } |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/pq_async_io/ut/unittest >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values |99.5%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.5%| [TA] $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {RESULT} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] |99.5%| [TM] {RESULT} ydb/tests/fq/restarts/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/restarts/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> 
test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Many_Serial [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.096701Z ...starting tablet 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.019 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.024 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...restarting tablet 00000.029 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.030 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g3_s2/changelog.json ...restarting tablet again 00000.035 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g4_s2/snapshot 00000.036 DD| LOCAL_DB_BACKUP: Bootstrap for 
/home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpxVs1Yy/dummy/1/backup_19700101000000Z_g4_s2/changelog.json 00000.036 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.037 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.037 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.037 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {186b, 6} 00000.037 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.037 II| FAKE_ENV: All BS storage groups are stopped 00000.037 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.038 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.141305Z ...starting tablet 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp0khpC9/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.020 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp0khpC9/dummy/1/backup_19700101000000Z_g2_s2/changelog.json 00000.155 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception yexception: ydb/core/tablet_flat/flat_executor.cpp:5177: Backup snapshot failed: Failed to create snapshot dir /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp0khpC9/dummy/1/backup_19700101000000Z_g2_s2/snapshot: (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp0khpC9/dummy/1 ??+0 (0x1241BD1D) __cxa_throw+221 (0x1241BB3D) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&)+1049 (0x188BC599) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1443 (0x18841633) NActors::IActor::Receive(TAutoPtr&)+744 (0x13B4A338) ??+0 (0x1241BD1D) __cxa_rethrow_primary_exception+340 (0x1241BF64) std::rethrow_exception(std::exception_ptr)+28 (0x1245DCDC) NActors::IActorExceptionHandler::OnUnhandledException(std::exception_ptr const&)+183 (0x1127A057) ...waiting tablet death 00000.156 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.157 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.157 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.157 II| FAKE_ENV: DS.0 gone, left {62b, 2}, put {62b, 2} 00000.157 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.157 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.157 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.157 II| FAKE_ENV: All BS storage groups are stopped 00000.157 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.157 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 17}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.305881Z ...starting tablet 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 
00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.012 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.024 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.033 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.033 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.034 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.085 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.092 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp9g815w/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.093 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.094 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.098 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.098 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.099 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.099 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {440b, 14} 00000.099 II| FAKE_ENV: DS.1 gone, left {395b, 2}, put {860b, 6} 00000.099 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.099 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.099 II| FAKE_ENV: All BS storage groups are stopped 00000.099 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.099 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 26}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.443691Z ...starting tablet 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.023 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.028 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns 00000.031 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.031 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns simultaneously 00000.032 DD| LOCAL_DB_BACKUP: Handle 
NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...erasing row 00000.041 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.042 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...replacing row 00000.043 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.044 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing different values in one column 00000.044 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.046 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing composite primary key 00000.047 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.063 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.083 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.084 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.084 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.142 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.147 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmp6Poc8M/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.147 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.148 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.159 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.160 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.160 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.160 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {1268b, 26} 00000.160 II| FAKE_ENV: DS.1 gone, left {668b, 4}, put {2057b, 18} 00000.160 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.160 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.160 II| FAKE_ENV: All BS storage groups are stopped 00000.160 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.160 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 36}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:46:18.612773Z ...starting tablet 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpZO39z7/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.012 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/d9xi/002317/r3tmp/tmpZO39z7/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NT ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] 
>> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> 
test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [FAIL] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestBadSession::test_simple >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test 
|99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_public_api.py::TestBadSession::test_simple [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.6%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> TieredCache::Touch [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TieredCache::InsertUntouched [GOOD] >> TieredCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::Request_Basics >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> 
TSharedPageCache_Actor::Request_Subset |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset [GOOD] >> TSharedPageCache_Actor::Request_Superset_Reversed >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPages >> TSharedPageCache_Actor::InMemory_ReloadPages [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Active >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TSharedPageCache_Transactions::Compaction >> TSharedPageCache_Transactions::Compaction [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum >> Vacuum::StartVacuum [GOOD] >> 
Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables >> Vacuum::StartVacuumMultipleTables [GOOD] >> Vacuum::StartVacuumWithFollowers [GOOD] >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units >> Vacuum::StartVacuumEmptyTable [GOOD] >> Vacuum::StartVacuumWithRestarts [GOOD] >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> TVersions::WreckHead >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> 
TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> 
TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting >> TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> 
TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:44.390717Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.017 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.018 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.018 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.018 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.018 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.018 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: All BS storage groups are stopped 00000.018 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.018 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:44.413760Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 84b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 
dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{2, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{3, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{4, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{5, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: 
Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...compacting 00000.012 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Full 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.012 II| TABLET_EXECUTOR: Leader{1:2:8} starting compaction 00000.012 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{1 on 101, Compact{1.2.8, eph 1}} 00000.012 II| TABLET_EXECUTOR: Leader{1:2:9} started compaction 1 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.013 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:69632:397:0] left 397b 00000.013 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:12288:211:0] left 608b 00000.014 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:69632:397:0] result OK flags { Valid } left 211b 00000.014 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:12288:211:0] result OK flags { Valid } left 0b 00000.014 II| OPS_COMPACT: Compact{1.2.8, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (397 0 0)b }, ecr=1.000 00000.015 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 8, product {1 parts epoch 2} done 00000.015 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.015 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.015 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 8 for step 8 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 3 for step 9 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} switch applied on followers, step 9 ...waiting until compacted 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} took 4194304b 
of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} hope 1 -> done Change{8, redo 72b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...making snapshot with concurrent commit 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxM ... mory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...checking rows (expecting new data and no metadata for old transactions) 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.015 II| TABLET_EXECUTOR: Leader{1:2:7} suiciding, Waste{2:0, 357b +(4, 602b), 6 trc, -602b acc} 00000.017 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.017 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 401 bytes, 401 total, blobs: { [1:2:2:1:8192:209:0], [1:2:5:1:32768:130:0], [1:2:6:1:32768:62:0] } 00000.018 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 271 bytes, 271 total, blobs: { [1:2:3:1:24576:123:0], [1:2:6:1:24576:148:0] } 00000.018 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.018 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 177b, wait} done, Waste{2:0, 357b +(4, 602b), 6 trc} 00000.018 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.019 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.021 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 177 bytes, 177 total, blobs: { [1:3:1:1:28672:177:0] } 00000.021 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 209 bytes, 209 total, blobs: { [1:2:2:1:8192:209:0] } 00000.021 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 148 bytes, 148 total, blobs: { [1:2:6:1:24576:148:0] } 00000.021 II| TABLET_EXECUTOR: Leader{1:4:0} activating executor 00000.021 II| TABLET_EXECUTOR: LSnap{1:4, on 4:1, 177b, wait} done, Waste{2:0, 357b +(0, 0b), 1 trc} 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} 00000.022 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.022 II| TABLET_EXECUTOR: Leader{1:4:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.023 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.023 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.023 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {736b, 11} 00000.023 II| FAKE_ENV: DS.1 gone, left {534b, 3}, put {1540b, 11} 00000.023 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.023 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.023 II| FAKE_ENV: All BS storage groups are stopped 00000.023 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.023 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 80}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:50.499821Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| 
TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... initializing schema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 64b alter 11b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ... inserting rows 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 512b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 ...compacting table 00000.010 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.010 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.010 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.010 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.010 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.018 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {1 parts epoch 2} done 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 ... 
observed TEvAttach from [63:30:2062] (leader) 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ... starting follower ... wait first follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.019 II| TABLET_EXECUTOR: LSnap{1:2, on 2:7, 251b, wait} done, Waste{2:0, 770b +(2, 226b), 6 trc} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 2 for step 7 ... observed TEvTablet::TEvFBoot, followerSysActor: [63:51:2087] 00000.020 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.020 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 161 bytes, 161 total, blobs: { [1:2:5:1:12288:161:0] } 00000.021 II| TABLET_EXECUTOR: Follower{1:2:0} activating executor ... observed TEvAttach from [63:58:2091] (follower) ... waiting for NKikimr::NSharedCache::TEvAttach (done) ... stopping leader 00000.021 DD| TABLET_EXECUTOR: Follower{1:2:0} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.021 II| TABLET_EXECUTOR: Leader{1:2:8} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} ... promoting follower ... wait secont follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.023 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 251 bytes, 251 total, blobs: { [1:2:7:1:28672:251:0] } 00000.023 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.024 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.024 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 251b, wait} done, Waste{2:0, 770b +(0, 0b), 1 trc} ... observed TEvAttach from [63:58:2091] (follower) ... 
waiting for NKikimr::NSharedCache::TEvAttach (done) 00000.024 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.025 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 27b} miss {0 0b} in-memory miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {119b, 4}, put {673b, 11} 00000.025 II| FAKE_ENV: DS.1 gone, left {1637b, 10}, put {1637b, 10} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.025 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 57}, stopped |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::FullScan |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TExecutorDb::EncodedPage [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} 
| | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 
| | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 
1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
.153 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.154 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 1}} 00000.154 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.154 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.154 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.154 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{5 on 2, Compact{1.2.7, eph 2}} 00000.154 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 5 00000.154 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.199 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.199 II| OPS_COMPACT: Compact{1.2.7, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.201 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.201 II| OPS_COMPACT: Compact{1.2.6, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.211 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 7, product {1 parts epoch 3} done 00000.346 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 3 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 6, product {1 parts epoch 0} done 00000.347 II| TABLET_EXECUTOR: Leader{1:2:11} starting compaction 00000.347 II| TABLET_EXECUTOR: Leader{1:2:12} starting Scan{7 on 2, Compact{1.2.11, eph 3}} 00000.347 II| TABLET_EXECUTOR: Leader{1:2:12} started compaction 7 00000.347 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.375 II| TABLET_EXECUTOR: Leader{1:2:12} starting compaction 00000.376 II| TABLET_EXECUTOR: Leader{1:2:13} starting Scan{9 on 2, Compact{1.2.12, eph 2}} 00000.376 II| TABLET_EXECUTOR: Leader{1:2:13} started compaction 9 00000.376 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} begin on TSubset{head 0, 0m 1p 0c} 00000.376 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.376 II| OPS_COMPACT: Compact{1.2.11, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.378 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00000.378 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{11 on 2, Compact{1.2.13, eph 1}} 00000.378 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 11 00000.378 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.419 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 7 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 11, product {1 parts epoch 4} done 00000.441 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.441 II| OPS_COMPACT: Compact{1.2.13, eph 1} end=Done, 5 blobs 1r 
(max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.448 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.448 II| OPS_COMPACT: Compact{1.2.12, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.451 II| TABLET_EXECUTOR: Leader{1:2:15} Compact 11 on TGenCompactionParams{2: gen 2 epoch 0, 1 parts} step 13, product {1 parts epoch 0} done 00000.451 II| TABLET_EXECUTOR: Leader{1:2:16} Compact 9 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 12, product {1 parts epoch 0} done 00000.452 II| TABLET_EXECUTOR: Leader{1:2:17} starting compaction 00000.452 II| TABLET_EXECUTOR: Leader{1:2:18} starting Scan{13 on 2, Compact{1.2.17, eph 3}} 00000.452 II| TABLET_EXECUTOR: Leader{1:2:18} started compaction 13 00000.452 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} begin on TSubset{head 0, 0m 1p 0c} 00000.452 II| TABLET_EXECUTOR: Leader{1:2:18} starting compaction 00000.452 II| TABLET_EXECUTOR: Leader{1:2:19} starting Scan{15 on 2, Compact{1.2.18, eph 2}} 00000.452 II| TABLET_EXECUTOR: Leader{1:2:19} started compaction 15 00000.452 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.511 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.511 II| OPS_COMPACT: Compact{1.2.17, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.551 II| TABLET_EXECUTOR: Leader{1:2:19} Compact 13 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 17, product {1 parts epoch 0} done 00000.552 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=5}, trace 0 of 0 ~3p 00000.552 II| OPS_COMPACT: Compact{1.2.18, eph 2} end=Done, 6 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.562 II| TABLET_EXECUTOR: Leader{1:2:20} Compact 15 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 18, product {1 parts epoch 0} done 00000.563 II| TABLET_EXECUTOR: Leader{1:2:21} starting compaction 00000.563 II| TABLET_EXECUTOR: Leader{1:2:22} starting Scan{17 on 2, Compact{1.2.21, eph 3}} 00000.563 II| TABLET_EXECUTOR: Leader{1:2:22} started compaction 17 00000.563 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} begin on TSubset{head 0, 0m 2p 0c} 00000.615 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 0 of 0 ~3p 00000.615 II| OPS_COMPACT: Compact{1.2.21, eph 3} end=Done, 6 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.620 II| TABLET_EXECUTOR: Leader{1:2:22} Compact 17 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 21, product {1 parts epoch 0} done 00000.640 II| TABLET_EXECUTOR: Leader{1:2:23} suiciding, Waste{2:0, 
20001011b +(44, 90121851b), 22 trc, -90121851b acc} 00000.653 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.653 NN| TABLET_SAUSAGECACHE: Poison cache serviced 27 reqs hit {6 20000188b} miss {21 110000587b} in-memory miss {0 0b} 00000.653 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.653 II| FAKE_ENV: DS.0 gone, left {3608b, 22}, put {3628b, 23} 00000.654 II| FAKE_ENV: DS.1 gone, left {122950b, 32}, put {122950b, 32} 00000.659 II| FAKE_ENV: DS.2 gone, left {110001012b, 29}, put {110001012b, 29} 00000.683 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.683 II| FAKE_ENV: All BS storage groups are stopped 00000.683 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.683 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 82}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:51:12.242235Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.073 II| TABLET_EXECUTOR: Leader{1:2:3} starting compaction 00000.073 II| TABLET_EXECUTOR: Leader{1:2:4} starting Scan{1 on 2, Compact{1.2.3, eph 1}} 00000.074 II| TABLET_EXECUTOR: Leader{1:2:4} started compaction 1 00000.074 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.086 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.086 II| OPS_COMPACT: Compact{1.2.3, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (39360 0 0)b }, ecr=0.004 00000.087 II| TABLET_EXECUTOR: Leader{1:2:4} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.158 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.158 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 2}} 00000.158 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.158 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.226 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} end=Done, 2r seen, TFwd{fetch=38.3KiB,saved=38.3KiB,usage=38.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.226 II| OPS_COMPACT: Compact{1.2.6, eph 2} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.234 II| TABLET_EXECUTOR: Leader{1:2:7} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 6, product {1 parts epoch 3} done 00000.322 II| TABLET_EXECUTOR: Leader{1:2:9} starting compaction 00000.322 II| TABLET_EXECUTOR: Leader{1:2:10} starting Scan{5 on 2, Compact{1.2.9, eph 3}} 00000.323 II| TABLET_EXECUTOR: Leader{1:2:10} started compaction 5 00000.323 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.418 II| TABLET_OPS_HOST: Scan{5 on 
2, Compact{1.2.9, eph 3}} end=Done, 2r seen, TFwd{fetch=76.8KiB,saved=76.8KiB,usage=76.8KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.418 II| OPS_COMPACT: Compact{1.2.9, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.487 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 9, product {1 parts epoch 4} done 00000.489 II| TABLET_EXECUTOR: Leader{1:2:11} suiciding, Waste{2:0, 79404b +(7, 236400b), 10 trc, -236400b acc} 00000.495 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.495 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {4 117917b} miss {0 0b} in-memory miss {0 0b} 00000.495 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.495 II| FAKE_ENV: DS.0 gone, left {1034b, 10}, put {1054b, 11} 00000.495 II| FAKE_ENV: DS.1 gone, left {316163b, 14}, put {316163b, 14} 00000.500 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.500 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.500 II| FAKE_ENV: All BS storage groups are stopped 00000.500 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.500 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 40}, stopped |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestRecursiveCreation::test_mkdir >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> TVersions::Wreck0Reverse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 2026-01-10T12:50:30.309317Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:50:30.309892Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-10T12:50:30.309931Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [1:5:2052] 2026-01-10T12:50:30.310001Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [1:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-10T12:50:30.310086Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #1 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #1 (done) Checking fetches#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] ... waiting for results #1 2026-01-10T12:50:30.310893Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 3 ] 2026-01-10T12:50:30.310976Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [1:5:2052] class Online pages [ 1 2 3 ] cookie 1 2026-01-10T12:50:30.311031Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 366B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #1 (done) Checking results#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] 2026-01-10T12:50:30.369734Z node 2 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:50:30.370211Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-10T12:50:30.370253Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:5:2052] 2026-01-10T12:50:30.370329Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-10T12:50:30.370431Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... 
blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-10T12:50:30.370551Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-10T12:50:30.370585Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:5:2052] 2026-01-10T12:50:30.370639Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:5:2052] cookie 2 class Online from cache [ ] already requested [ ] to request [ 4 5 ] 2026-01-10T12:50:30.370697Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 610B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-10T12:50:30.370803Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:6:2053] 2026-01-10T12:50:30.370861Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:6:2053] cookie 3 class Online from cache [ ] already requested [ ] to request [ 5 6 ] 2026-01-10T12:50:30.370917Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 854B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:50:30.371023Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:6:2053] 2026-01-10T12:50:30.371079Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:6:2053] cookie 4 class Online from cache [ ] already requested [ ] to request [ 6 7 ] 2026-01-10T12:50:30.371133Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 1.07KiB EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #4 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] ... 
waiting for results #4 2026-01-10T12:50:30.371485Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status ERROR pages [ 1 2 3 ] 2026-01-10T12:50:30.371515Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1076: Drop page collection [1:0:256:0:0:0:1] error ERROR 2026-01-10T12:50:30.371542Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:5:2052] class Online error ERROR cookie 1 2026-01-10T12:50:30.371584Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:6:2053] class Online error ERROR cookie 3 2026-01-10T12:50:30.371633Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 732B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] 2026-01-10T12:50:30.371835Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 6 ] 2026-01-10T12:50:30.371864Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 488B EvictedInMemoryBytes: 0B Checking results#4 Expected: Actual: ... waiting for results #4 2026-01-10T12:50:30.382240Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 6 7 ] 2026-01-10T12:50:30.382301Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:6:2053] class Online pages [ 6 7 ] cookie 4 2026-01-10T12:50:30.382346Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B 2026-01-10T12:50:30.382386Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 4 5 ] 2026-01-10T12:50:30.382406Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:5:2052] class Online pages [ 4 5 ] cookie 2 2026-01-10T12:50:30.382427Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 488B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] 2026-01-10T12:50:30.440047Z node 3 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... 
waiting for NKikimr::NSharedCache::TEvRequest 2026-01-10T12:50:30.440476Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-10T12:50:30.440524Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [3:5:2052] 2026-01-10T12:50:30.440588Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [3:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2026-01-10T12:50:30.440616Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] 2026-01-10T12:50:30.440690Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-10T12:50:30.440795Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-10T12:50:30.440818Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [3:6:2053] 2026-01-10T12:50:30.440862Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [3:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-10T12:50:30.440904Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) Checking fetches#2 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] ... waiting for fetches #2 2026-01-10T12:50:30.441026Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2026-01-10T12:50:30.441060Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 3 4 ] 2026-01-10T12:50:30.441102Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedI ... 
00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.024 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.025 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.026 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.354368Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.054 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.055 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.056 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} in-memory miss {0 0b} 00000.056 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.056 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.056 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.056 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.056 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2093b, 23} 00000.056 II| FAKE_ENV: All BS storage groups are stopped 00000.056 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.056 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.416907Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.035 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.036 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} in-memory miss {0 0b} 00000.036 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.036 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1824b, 23} 00000.036 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.036 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.036 II| FAKE_ENV: All BS storage groups are stopped 00000.036 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.036 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.458294Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 
262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.028 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.029 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} in-memory miss {0 0b} 00000.029 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.030 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.030 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.030 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: All BS storage groups are stopped 00000.030 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.030 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.493394Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.027 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.028 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} in-memory miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1567b, 23} 00000.028 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: DS.1 gone, left {529b, 3}, put {197610b, 21} 00000.028 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: All BS storage groups are stopped 00000.028 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.028 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.526538Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.011 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.011 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.012 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.012 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.012 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.012 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.012 II| FAKE_ENV: All BS storage groups are stopped 00000.012 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.012 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.543329Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting 
storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.045 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.045 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.046 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} in-memory miss {0 0b} 00000.046 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.046 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.046 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.046 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.046 II| FAKE_ENV: All BS storage groups are stopped 00000.046 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.047 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.596327Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.020 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.020 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.021 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.021 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.021 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.021 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.021 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.021 II| FAKE_ENV: All BS storage groups are stopped 00000.021 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.021 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.626965Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR cookie 0 00000.038 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR ... waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult ... 
waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult (done) 00000.049 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.050 NN| TABLET_SAUSAGECACHE: Poison cache serviced 5 reqs hit {8 205278b} miss {0 0b} in-memory miss {0 0b} 00000.051 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.051 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {105547b, 14} 00000.051 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {102560b, 2} 00000.051 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.051 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1353b, 17} 00000.051 II| FAKE_ENV: All BS storage groups are stopped 00000.051 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.051 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-10T12:50:34.689950Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.230 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled 00000.241 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.242 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.242 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.242 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.242 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.242 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {499b, 9} 00000.242 II| FAKE_ENV: DS.1 gone, left {425b, 4}, put {460b, 5} 00000.242 II| FAKE_ENV: All BS storage groups are stopped 00000.243 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 17.71s 00000.243 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_public_api.py::TestAttributes::test_create_table >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] >> test_public_api.py::TestDocApiTables::test_create_table >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] |99.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units >> test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> 
test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] >> ConsistentIndexRead::InteractiveTx >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> Backup::UuidValue >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> Backup::UuidValue [GOOD] >> test_workload.py::TestYdbWorkload::test[row] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/data_00.csv.sha256" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/metadata.json.sha256" Found S3 object: "ProducerUuidValueBackup/permissions.pb" Found S3 object: "ProducerUuidValueBackup/permissions.pb.sha256" Found S3 object: "ProducerUuidValueBackup/scheme.pb" Found S3 object: "ProducerUuidValueBackup/scheme.pb.sha256" |99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test >> Transfer_RowTable::KeyColumnFirst >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> Transfer_RowTable::KeyColumnFirst [GOOD] >> Transfer_RowTable::KeyColumnLast >> Transfer_RowTable::KeyColumnLast [GOOD] >> Transfer_RowTable::ComplexKey |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> Transfer_RowTable::ComplexKey [GOOD] >> Transfer_RowTable::NullableColumn |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> Transfer_RowTable::NullableColumn [GOOD] >> Transfer_RowTable::WriteNullToKeyColumn >> 
Transfer_RowTable::WriteNullToKeyColumn [GOOD] >> Transfer_RowTable::WriteNullToColumn >> Transfer_RowTable::WriteNullToColumn [GOOD] >> Transfer_RowTable::Upsert_DifferentBatch >> Transfer_RowTable::Upsert_DifferentBatch [GOOD] >> Transfer_RowTable::Upsert_OneBatch >> KqpQuerySession::NoLocalAttach >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> Transfer_RowTable::Upsert_OneBatch [GOOD] >> Transfer_RowTable::ColumnType_Bool >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> Replication::Types >> Transfer_RowTable::ColumnType_Bool [GOOD] >> Transfer_RowTable::ColumnType_Date >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> Transfer_RowTable::ColumnType_Date [GOOD] >> Transfer_RowTable::ColumnType_Double >> Transfer_RowTable::ColumnType_Double [GOOD] >> Transfer_RowTable::ColumnType_Int8 >> Replication::PauseAndResumeReplication [GOOD] >> Replication::TopicAutopartitioning >> Transfer_RowTable::ColumnType_Int8 [GOOD] >> Transfer_RowTable::ColumnType_Int16 >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> Transfer_RowTable::ColumnType_Int16 [GOOD] >> Transfer_RowTable::ColumnType_Int32 >> Transfer_RowTable::ColumnType_Int32 [GOOD] >> Transfer_RowTable::ColumnType_Int64 >> KqpQuerySession::NoLocalAttach [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> Transfer_RowTable::ColumnType_Int64 [GOOD] >> Transfer_RowTable::ColumnType_Utf8_LongValue >> test_workload.py::TestYdbWorkload::test[row] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_session/unittest >> S3PathStyleBackup::DisableVirtualAddressing >> Transfer_RowTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_RowTable::ColumnType_Uuid >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test_kafka_streams.py::TestYdbTopicWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test |99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer_RowTable::ColumnType_Uuid [GOOD] >> Transfer_RowTable::MessageField_Attributes |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> Transfer_RowTable::MessageField_Attributes [GOOD] >> Transfer_RowTable::MessageField_CreateTimestamp |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/s3_path_style/unittest >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_workload.py::TestYdbBackupWorkload::test >> Transfer_RowTable::MessageField_CreateTimestamp [GOOD] >> Transfer_RowTable::MessageField_Partition >> test_workload.py::TestYdbWorkload::test[column] >> test_workload.py::TestYdbWorkload::test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] >> Transfer_RowTable::MessageField_Partition [GOOD] >> Transfer_RowTable::MessageField_SeqNo |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::MessageField_SeqNo [GOOD] >> Transfer_RowTable::MessageField_ProducerId >> test_workload.py::TestYdbTestShardWorkload::test >> Transfer_RowTable::MessageField_ProducerId [GOOD] >> Transfer_RowTable::MessageField_MessageGroupId >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> Transfer_RowTable::MessageField_MessageGroupId [GOOD] >> Transfer_RowTable::MessageField_WriteTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] 2026-01-10 12:56:42,429 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:56:43,121 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 851789 838M 839M 761M ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/d9xi/003acf/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 961903 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 963598 1.6G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 963812 1.6G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 964273 1.6G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in 
from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 200, in test_turn_on_serverless_storage_billing time.sleep(15) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007bbee92cb640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007bbee7fc1640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007bbeeadd6640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007bbef363a640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007bbef0507640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File 
"contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007bbef6f9a640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fbf274ea940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003acf', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003acf/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', 
'--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003acf', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003acf/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003acf/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2026-01-10 12:57:13,809 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2026-01-10 12:57:13,816 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> Transfer_RowTable::MessageField_WriteTimestamp [GOOD] >> Transfer_RowTable::ProcessingJsonMessage >> test_workload_topic.py::TestYdbTopicWorkload::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/cdc/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test[row-local] >> 
Transfer_RowTable::ProcessingJsonMessage [GOOD] >> Transfer_RowTable::ProcessingCDCMessage >> test_encryption.py::TestEncryption::test_simple_encryption |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] [GOOD] >> Transfer_RowTable::ProcessingCDCMessage [GOOD] >> Transfer_RowTable::ProcessingTargetTable |99.9%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer_RowTable::ProcessingTargetTable [GOOD] >> Transfer_RowTable::ProcessingTargetTableOtherType >> test_workload.py::TestYdbWorkload::test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] 2026-01-10 12:57:03,863 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-10 12:57:04,780 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 859335 1.2G 1.2G 1.1G ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/d9xi/003ab2/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 984076 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 985451 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 985508 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t 985809 1.5G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/t Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main 
config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 534, in test_database_with_column_disk_quotas rows = [BulkUpsertRow((now + datetime.timedelta(microseconds=dt))) for dt in range(i * bulk_size, (i + 1) * bulk_size)] File "ydb/tests/functional/serverless/test_serverless.py", line 519, in __init__ self.value_string = ''.join(random.choices(string.ascii_lowercase, k=1024)) File "contrib/tools/python3/Lib/random.py", line 467, in choices return [population[floor(random() * n)] for i in _repeat(None, k)] File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b3022c71640 
(most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3035ea9640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b302a3c5640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b30283a2640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b302b1e8640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b302cffb640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b302c4f2640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3040d21640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File 
"contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3042041640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3044681640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f3074bab940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 
...runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003ab2', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003ab2/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/d9xi/003ab2', '--source-root', '/home/runner/.ya/build/build_root/d9xi/003ab2/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/d9xi/003ab2/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2026-01-10 12:57:35,940 WARNING library.python.cores: Core dump dir doesn't exist: 
/coredumps 2026-01-10 12:57:35,944 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] >> Transfer_RowTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_RowTable::DropColumn >> Transfer_RowTable::DropColumn [GOOD] >> Transfer_RowTable::TableWithSyncIndex >> Transfer_RowTable::TableWithSyncIndex [GOOD] >> Transfer_RowTable::TableWithAsyncIndex |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer_RowTable::TableWithAsyncIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/row_table/unittest >> Transfer_RowTable::TableWithAsyncIndex [GOOD] Test command err: DDL: CREATE TABLE `Table_14312698883980169788` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14312698883980169788` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_14312698883980169788` FROM `Topic_14312698883980169788` TO `Table_14312698883980169788` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14312698883980169788` ORDER BY `Key`, `Message` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_14312698883980169788`; DDL: DROP TABLE `Table_14312698883980169788` DDL: DROP TOPIC `Topic_14312698883980169788` DDL: CREATE TABLE `Table_3829889869838486319` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3829889869838486319` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_3829889869838486319` FROM `Topic_3829889869838486319` TO `Table_3829889869838486319` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_3829889869838486319` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_3829889869838486319` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_3829889869838486319` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_3829889869838486319`; DDL: DROP TABLE `Table_3829889869838486319` DDL: DROP TOPIC `Topic_3829889869838486319` DDL: CREATE TABLE `Table_3615369381157195670` ( Key1 Uint64 NOT NULL, 
Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3615369381157195670` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_3615369381157195670` FROM `Topic_3615369381157195670` TO `Table_3615369381157195670` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_3615369381157195670` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_3615369381157195670` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_3615369381157195670` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_3615369381157195670`; DDL: DROP TABLE `Table_3615369381157195670` DDL: DROP TOPIC `Topic_3615369381157195670` DDL: CREATE TABLE `Table_12938909044988758572` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_12938909044988758572` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_12938909044988758572` FROM `Topic_12938909044988758572` TO `Table_12938909044988758572` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_12938909044988758572` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12938909044988758572` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12938909044988758572` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_12938909044988758572`; DDL: DROP TABLE `Table_12938909044988758572` DDL: DROP TOPIC `Topic_12938909044988758572` DDL: CREATE TABLE `Table_1335689819637444246` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_1335689819637444246` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_1335689819637444246` FROM `Topic_1335689819637444246` TO `Table_1335689819637444246` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_1335689819637444246`; DDL: DROP TABLE `Table_1335689819637444246` DDL: DROP TOPIC `Topic_1335689819637444246` DDL: CREATE TABLE `Table_9756315382989862023` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_9756315382989862023` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_9756315382989862023` FROM `Topic_9756315382989862023` TO `Table_9756315382989862023` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_9756315382989862023`; DDL: DROP TABLE `Table_9756315382989862023` DDL: DROP TOPIC `Topic_9756315382989862023` DDL: CREATE TABLE `Table_12093777803289688897` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_12093777803289688897` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_12093777803289688897` FROM `Topic_12093777803289688897` TO `Table_12093777803289688897` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_12093777803289688897` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_12093777803289688897` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_12093777803289688897` ORDER BY `Message` Attempt=17 count=1 >>>>> Query: SELECT `Message` FROM `Table_12093777803289688897` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_12093777803289688897`; DDL: DROP TABLE `Table_12093777803289688897` DDL: CREATE TABLE `Table_1258029381466460774` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_1258029381466460774` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { ... NULL, timestamp Datetime NOT NULL, operation Utf8, PRIMARY KEY (object_id, timestamp) ) WITH ( STORE = ROW ) DDL: ALTER TABLE `SourceTable_15922351123697182064` ADD CHANGEFEED `cdc_15922351123697182064` WITH ( MODE = 'UPDATES', FORMAT = 'JSON' ) DDL: CREATE TABLE `Table_15922351123697182064` ( timestamp Datetime NOT NULL, object_id Utf8 NOT NULL, operation Utf8, PRIMARY KEY (timestamp, object_id) ) WITH ( STORE = ROW ) DDL: $l = ($x) -> { $d = CAST($x._data AS JSON); return [ <| timestamp: Unwrap(DateTime::MakeDatetime(DateTime::ParseIso8601(CAST(Yson::ConvertToString($d.key[1]) AS Utf8)))), object_id: Unwrap(CAST(Yson::ConvertToString($d.key[0]) AS Utf8)), operation: CAST(Yson::ConvertToString($d.update.operation) AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15922351123697182064` FROM `SourceTable_15922351123697182064/cdc_15922351123697182064` TO `Table_15922351123697182064` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: INSERT INTO `SourceTable_15922351123697182064` (`object_id`, `timestamp`, `operation`) VALUES ('id_1', Datetime('2019-01-01T15:30:00Z'), 'value_1'); >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=19 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=18 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=17 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=16 count=0 >>>>> Query: SELECT 
`operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=15 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_15922351123697182064` ORDER BY `operation`, `object_id`, `timestamp` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_15922351123697182064`; DDL: DROP TABLE `Table_15922351123697182064` DDL: DROP TABLE `SourceTable_15922351123697182064` DDL: CREATE TABLE `Table_11376471241178995505` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_11376471241178995505_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_11376471241178995505_2` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_11376471241178995505` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_11376471241178995505_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, <| __ydb_table: "Table_11376471241178995505_2", Key: $x._offset, Message:CAST($x._data || "_2" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_11376471241178995505` FROM `Topic_11376471241178995505` TO `Table_11376471241178995505` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505` ORDER BY `Key`, `Message` Attempt=16 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505_1` ORDER BY `Key`, `Message` Attempt=19 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_11376471241178995505_2` ORDER BY `Key`, `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_11376471241178995505`; DDL: DROP TABLE `Table_11376471241178995505` DDL: DROP TOPIC `Topic_11376471241178995505` DDL: CREATE TABLE `Table_3444765492210347845` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_3444765492210347845_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_3444765492210347845` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_3444765492210347845_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_3444765492210347845` FROM `Topic_3444765492210347845` TO `Table_3444765492210347845` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table '/local/Table_3444765492210347845_1' Only the OLTP table is supported } } >>>>> EXPECTED: Error: Bulk upsert to table '/local/Table_ DDL: DROP TRANSFER `Transfer_3444765492210347845`; DDL: DROP TABLE `Table_3444765492210347845` DDL: DROP TOPIC `Topic_3444765492210347845` DDL: CREATE TABLE `Table_10584947685091083537` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10584947685091083537` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_10584947685091083537` FROM `Topic_10584947685091083537` TO `Table_10584947685091083537` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_10584947685091083537` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_10584947685091083537` ORDER BY `Message` Attempt=18 count=1 DDL: ALTER TABLE Table_10584947685091083537 DROP COLUMN Message >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_10584947685091083537' Unknown column: Message } } >>>>> EXPECTED: Unknown column: Message DDL: CREATE TABLE `Table_16206329515968831490` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL SYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16206329515968831490` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16206329515968831490` FROM `Topic_16206329515968831490` TO `Table_16206329515968831490` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_16206329515968831490' Only async-indexed tables are supported by BulkUpsert } } >>>>> EXPECTED: Only async-indexed tables are supported by BulkUpsert DDL: DROP TRANSFER `Transfer_16206329515968831490`; DDL: DROP TABLE `Table_16206329515968831490` DDL: DROP TOPIC `Topic_16206329515968831490` DDL: CREATE TABLE `Table_2173751088658397502` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL ASYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_2173751088658397502` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_2173751088658397502` FROM `Topic_2173751088658397502` TO `Table_2173751088658397502` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:5355/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_2173751088658397502` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_2173751088658397502` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_2173751088658397502` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_2173751088658397502`; DDL: DROP TABLE `Table_2173751088658397502` DDL: DROP TOPIC `Topic_2173751088658397502` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/row_table/unittest >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/table/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/table/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/table/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/stress/simple_queue/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[row-local] [GOOD] >> test_workload.py::TestYdbBackupWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] >> test_workload.py::TestYdbWorkload::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/backup/tests/py3test >> test_workload.py::TestYdbBackupWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/backup/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/backup/tests/py3test >> Transfer_ColumnTable::KeyColumnFirst >> test_workload.py::TestYdbWorkload::test[row-remote] >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] >> 
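The two preceding scenarios pin down the secondary-index restriction for transfer targets: a row table with a GLOBAL SYNC index is rejected by BulkUpsert ("Only async-indexed tables are supported by BulkUpsert"), while the same table with a GLOBAL ASYNC index accepts the transferred rows. A minimal YQL sketch of the two target declarations, mirroring the DDL the test generates (table and index names here are placeholders, not taken from this run):

-- Rejected as a transfer target: BulkUpsert reports
-- "Only async-indexed tables are supported by BulkUpsert".
CREATE TABLE `ExampleSyncIndexed` (
    Key Uint64 NOT NULL,
    Message Utf8,
    INDEX `by_message` GLOBAL SYNC ON (`Message`),
    PRIMARY KEY (Key)
) WITH ( STORE = ROW );

-- Accepted: the async index is maintained in the background, so the
-- transfer's bulk writes go through (the queries above eventually report count=1).
CREATE TABLE `ExampleAsyncIndexed` (
    Key Uint64 NOT NULL,
    Message Utf8,
    INDEX `by_message` GLOBAL ASYNC ON (`Message`),
    PRIMARY KEY (Key)
) WITH ( STORE = ROW );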
test_workload.py::TestYdbWorkload::test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_ColumnTable::KeyColumnFirst [GOOD] >> Transfer_ColumnTable::KeyColumnLast |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kafka/tests/py3test >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] |99.9%| [TM] {RESULT} ydb/tests/stress/kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kafka/tests/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/viewer/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/viewer/tests/py3test >> Transfer_ColumnTable::KeyColumnLast [GOOD] >> Transfer_ColumnTable::ComplexKey >> test_value_workload.py::TestResultSetValueWorkload::test >> Transfer_ColumnTable::ComplexKey [GOOD] >> Transfer_ColumnTable::NullableColumn >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/testshard_workload/tests/py3test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/encryption/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/testshard_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/testshard_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test >> Transfer_ColumnTable::NullableColumn [GOOD] >> Transfer_ColumnTable::WriteNullToKeyColumn >> Transfer_ColumnTable::WriteNullToKeyColumn [GOOD] >> Transfer_ColumnTable::WriteNullToColumn >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestDeltaProtocol::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/ctas/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/ctas/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/ctas/tests/py3test >> KqpQueryService::ReplyPartLimitProxyNode >> Transfer_ColumnTable::WriteNullToColumn [GOOD] >> Transfer_ColumnTable::Upsert_DifferentBatch >> Transfer::BaseScenario_Local >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test_workload.py::TestYdbKvWorkload::test[column] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] >> Transfer_ColumnTable::Upsert_DifferentBatch [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> Transfer::BaseScenario_Local [GOOD] >> Transfer::BaseScenario_Remote >> test_workload.py::TestYdbWorkload::test[row-remote] [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch [GOOD] >> Transfer_ColumnTable::ColumnType_Date |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/view/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/stress/show_create/view/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/view/tests/py3test >> Transfer::BaseScenario_Remote [GOOD] >> Transfer::CreateTransfer_TargetNotFound >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Transfer::ConnectionString_BadChar >> Transfer::ConnectionString_BadChar [GOOD] >> Transfer::ConnectionString_BadDNSName >> test_workload.py::TestYdbWorkload::test[column-local] >> Transfer::ConnectionString_BadDNSName [GOOD] >> Transfer::Create_WithPermission >> Transfer::Create_WithPermission [GOOD] >> Transfer::Create_WithoutTablePermission |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic_kafka/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic_kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic_kafka/tests/py3test >> Transfer::Create_WithoutTablePermission [GOOD] >> Transfer::Create_WithoutAlterTopicPermission_AndGrant >> Transfer_ColumnTable::ColumnType_Date [GOOD] >> Transfer_ColumnTable::ColumnType_Double >> Transfer::Create_WithoutAlterTopicPermission_AndGrant [GOOD] >> Transfer::LocalTopic_WithPermission >> Transfer::LocalTopic_WithPermission [GOOD] >> Transfer::LocalTopic_BigMessage >> Transfer_ColumnTable::ColumnType_Double [GOOD] >> Transfer_ColumnTable::ColumnType_Int8 >> test_workload.py::TestYdbWorkload::test [FAIL] >> Transfer_ColumnTable::ColumnType_Int8 [GOOD] >> Transfer_ColumnTable::ColumnType_Int16 >> NodeIdDescribe::HasDistribution [GOOD] >> Replication::TopicAutopartitioning [GOOD] >> Transfer::LocalTopic_BigMessage [FAIL] >> Transfer::AlterLambda |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_svc/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::TopicAutopartitioning [GOOD] Test command err: DDL: CREATE TABLE `SourceTable_15753742090797872749` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) ); >>>>> Query: UPSERT INTO `SourceTable_15753742090797872749` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) ); DDL: CREATE ASYNC REPLICATION `Replication_15753742090797872749` FOR `SourceTable_15753742090797872749` AS `Table_15753742090797872749` WITH ( CONNECTION_STRING = 'grpc://localhost:28504/?database=local' ); >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_15753742090797872749` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_15753742090797872749]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_15753742090797872749` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=18 count=1 DDL: DROP ASYNC REPLICATION `Replication_15753742090797872749`; DDL: DROP TABLE `SourceTable_15753742090797872749` DDL: CREATE TABLE `SourceTable_16109530938340840402` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_16109530938340840402` FOR `SourceTable_16109530938340840402` AS `Table_16109530938340840402` WITH ( CONNECTION_STRING = 'grpc://localhost:28504/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_16109530938340840402` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query error:
: Error: Scheme changed. Table: `/local/SourceTable_16109530938340840402`., code: 2028
: Error: Cannot parse tx 4. SCHEME_CHANGED: Table '/local/SourceTable_16109530938340840402' scheme changed. at tablet# 72075186224037893, code: 2034
: Error: Query invalidated on scheme/internal error during Data execution, code: 2019 >>>>> Query: INSERT INTO `SourceTable_16109530938340840402` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_16109530938340840402` ORDER BY `Message` Attempt=19 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_16109530938340840402` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_16109530938340840402` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_16109530938340840402` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_16109530938340840402` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_16109530938340840402` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_16109530938340840402` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_16109530938340840402` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_16109530938340840402` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_16109530938340840402`; DDL: DROP TABLE `SourceTable_16109530938340840402` DDL: CREATE TABLE `SourceTable_6351682480264480761` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( AUTO_PARTITIONING_BY_SIZE = ENABLED, AUTO_PARTITIONING_PARTITION_SIZE_MB = 1, AUTO_PARTITIONING_BY_LOAD = ENABLED, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 10, UNIFORM_PARTITIONS = 1 ); DDL: CREATE ASYNC REPLICATION `Replication_6351682480264480761` FOR `SourceTable_6351682480264480761` AS `Table_6351682480264480761` WITH ( CONNECTION_STRING = 'grpc://localhost:28504/?database=local' ); Insert much data to trigger split >>>>> Query: UPSERT INTO `SourceTable_6351682480264480761` (Key, Message) VALUES (0, 
'Message-aaaa[… the payload continues as a very long run of 'a' characters; the log itself truncates the middle of this UPSERT with " ... " …]aaaa'), (4999, 'Message-aaaa[… another very long run of 'a' characters …]aaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); Wait for partition split and data replication Verify data replicated from all partitions >>>>> Query: SELECT `Message` FROM `Table_6351682480264480761` ORDER BY `Message` Attempt=19 count=5000 DDL: DROP ASYNC REPLICATION `Replication_6351682480264480761`; DDL: DROP TABLE `SourceTable_6351682480264480761` |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/replication/unittest >> Transfer_ColumnTable::ColumnType_Int16 [GOOD] >> Transfer_ColumnTable::ColumnType_Int32 |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> Transfer::AlterLambda [GOOD] >> Transfer::EnsureError >> Transfer::EnsureError [GOOD] >> Transfer::CheckCommittedOffset_Local >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::CheckCommittedOffset_Local [GOOD] >> Transfer::CheckCommittedOffset_Remote >> Transfer_ColumnTable::ColumnType_Int32 [GOOD] >> Transfer_ColumnTable::ColumnType_Int64 >> Transfer::CheckCommittedOffset_Remote [GOOD] >> Transfer::DropTransfer |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> Transfer::DropTransfer [GOOD] >> Transfer::CreateAndDropConsumer >> Transfer_ColumnTable::ColumnType_Int64 [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue >> Transfer::CreateAndDropConsumer [GOOD] >> Transfer::DescribeError_OnLambdaCompilation >> Transfer::DescribeError_OnLambdaCompilation [GOOD] >> Transfer::PausedAfterError |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/s3_backups/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/s3_backups/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/s3_backups/tests/py3test >> Transfer::PausedAfterError [GOOD] >> Transfer::DescribeTransferWithErrorTopicNotFound >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> Transfer::DescribeTransferWithErrorTopicNotFound [GOOD] >> Transfer::CustomConsumer >> Transfer_ColumnTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_ColumnTable::MessageField_Attributes >> Transfer::CustomConsumer [GOOD] >> Transfer::CustomConsumer_NotExists_Remote |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/stress/kv/tests/py3test >> Transfer_ColumnTable::MessageField_Attributes [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp >> Transfer::CustomConsumer_NotExists_Remote [GOOD] >> Transfer::CustomConsumer_NotExists_Local >> Transfer::CustomConsumer_NotExists_Local [GOOD] >> Transfer::CustomFlushInterval >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test[column-local] [GOOD] >> test_workload.py::TestDeltaProtocol::test [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp [GOOD] >> Transfer_ColumnTable::MessageField_Partition >> Transfer::CustomFlushInterval [GOOD] >> Transfer::AlterFlushInterval |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/node_broker/tests/py3test >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/node_broker/tests/py3test >> Transfer_ColumnTable::MessageField_Partition [GOOD] >> Transfer_ColumnTable::MessageField_SeqNo >> test_workload.py::TestYdbWorkload::test[column-remote] |99.9%| [TM] {RESULT} ydb/tests/stress/node_broker/tests/py3test >> Transfer::AlterFlushInterval [GOOD] >> Transfer::AlterBatchSize >> Transfer::AlterBatchSize [GOOD] >> Transfer::CreateTransferSourceNotExists >> Transfer::CreateTransferSourceNotExists [GOOD] >> Transfer::CreateTransferSourceNotExists_LocalTopic >> Transfer::CreateTransferSourceNotExists_LocalTopic [GOOD] >> Transfer::CreateTransferSourceDirNotExists >> Transfer::CreateTransferSourceDirNotExists [GOOD] >> Transfer::CreateTransferSourceDirNotExists_LocalTopic >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> Transfer::CreateTransferSourceDirNotExists_LocalTopic [GOOD] >> Transfer::TransferSourceDropped >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] >> Transfer_ColumnTable::MessageField_SeqNo [GOOD] >> Transfer_ColumnTable::MessageField_ProducerId >> Transfer::TransferSourceDropped [GOOD] >> Transfer::TransferSourceDropped_LocalTopic |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> Transfer::TransferSourceDropped_LocalTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic >> Transfer::CreateTransferSourceIsNotTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic [GOOD] >> Transfer::CreateTransferTargetIsNotTable >> Transfer_ColumnTable::MessageField_ProducerId [GOOD] >> Transfer_ColumnTable::MessageField_MessageGroupId >> Transfer::CreateTransferTargetIsNotTable [GOOD] >> Transfer::CreateTransferTargetNotExists >> Transfer::CreateTransferTargetNotExists [GOOD] >> Transfer::PauseAndResumeTransfer >> Transfer_ColumnTable::MessageField_MessageGroupId [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp >> test_value_workload.py::TestResultSetValueWorkload::test [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp [GOOD] >> Transfer_ColumnTable::ProcessingJsonMessage |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/streaming/tests/py3test >> 
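The Transfer::* functional tests streaming by here (CustomFlushInterval, AlterFlushInterval, AlterBatchSize, and so on) all build on the same CREATE TRANSFER shape that appears throughout this log. A minimal sketch with placeholder object names, assuming the topic and table live in the same local database (so no CONNECTION_STRING); the comments reflect the apparent meaning of the options as exercised by the tests, not documented guarantees:

$map = ($x) -> {
    -- one topic message becomes one row keyed by the message offset
    return [
        <| Key: $x._offset, Message: CAST($x._data AS Utf8) |>
    ];
};

CREATE TRANSFER `ExampleTransfer`
    FROM `ExampleTopic` TO `ExampleTable`
    USING $map
WITH (
    -- buffered rows are flushed on this interval ...
    FLUSH_INTERVAL = Interval('PT1S'),
    -- ... or sooner, once roughly this many bytes have accumulated
    BATCH_SIZE_BYTES = 8388608
);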
test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::PauseAndResumeTransfer [GOOD] >> Transfer::TargetTableWithoutDirectory >> Transfer::TargetTableWithoutDirectory [GOOD] >> Transfer::TargetTableWriteOutsideDirectory >> Transfer_ColumnTable::ProcessingJsonMessage [GOOD] >> Transfer_ColumnTable::ProcessingCDCMessage >> Transfer::TargetTableWriteOutsideDirectory [GOOD] >> Transfer::TargetTableWriteInsideDirectory >> Transfer::TargetTableWriteInsideDirectory [GOOD] >> test_arrow_workload.py::TestResultSetArrowWorkload::test >> Transfer::AlterTargetDirectory >> Transfer_ColumnTable::ProcessingCDCMessage [GOOD] >> Transfer_ColumnTable::ProcessingTargetTable >> Transfer::AlterTargetDirectory [GOOD] >> Transfer::WriteToNotExists |99.9%| [TM] {RESULT} ydb/tests/stress/streaming/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/streaming/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> Transfer_ColumnTable::ProcessingTargetTable [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType >> Transfer::WriteToNotExists [GOOD] >> Transfer::WriteToNotTable >> Transfer::WriteToNotTable [GOOD] >> Transfer::AlterLambdaOnWork >> Transfer_ColumnTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_ColumnTable::DropColumn >> Transfer::AlterLambdaOnWork [GOOD] >> Transfer::CreateAndAlterTransferInDirectory >> Transfer::CreateAndAlterTransferInDirectory [GOOD] >> Transfer::Alter_WithSecret >> Transfer::Alter_WithSecret [GOOD] >> Transfer::MessageField_Key >> Transfer_ColumnTable::DropColumn [GOOD] >> Transfer_ColumnTable::BigBatchSize_Remote >> Transfer::MessageField_Key [GOOD] >> Transfer::MessageField_Key_Empty >> Transfer::MessageField_Key_Empty [GOOD] >> Transfer::ErrorInMultiLine >> Transfer::ErrorInMultiLine [GOOD] >> Transfer::ReadFromCDC_Remote >> Transfer::ReadFromCDC_Remote [GOOD] >> Transfer::ReadFromCDC_Local >> Transfer_ColumnTable::BigBatchSize_Remote [GOOD] >> Transfer_ColumnTable::BigBatchSize_Local >> Transfer::ReadFromCDC_Local [GOOD] >> Transfer::MessageField_CreateTimestamp_Remote >> Transfer::MessageField_CreateTimestamp_Remote [GOOD] >> Transfer::MessageField_CreateTimestamp_Local >> Transfer::MessageField_CreateTimestamp_Local [GOOD] >> Transfer::MessageField_WriteTimestamp_Remote >> Transfer::MessageField_WriteTimestamp_Remote [GOOD] >> Transfer::MessageField_WriteTimestamp_Local >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] >> Transfer::MessageField_WriteTimestamp_Local [GOOD] >> Transfer::MessageField_Attributes_Remote ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/column_table/unittest >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_1981620246879603053` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1981620246879603053` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_1981620246879603053` FROM `Topic_1981620246879603053` TO `Table_1981620246879603053` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM 
`Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=14 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_1981620246879603053` ORDER BY `Key`, `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_1981620246879603053`; DDL: DROP TABLE `Table_1981620246879603053` DDL: DROP TOPIC `Topic_1981620246879603053` DDL: CREATE TABLE `Table_12339971209435018681` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_12339971209435018681` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_12339971209435018681` FROM `Topic_12339971209435018681` TO `Table_12339971209435018681` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_12339971209435018681` ORDER BY `Key`, `Message` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_12339971209435018681`; DDL: DROP TABLE `Table_12339971209435018681` DDL: DROP TOPIC `Topic_12339971209435018681` DDL: CREATE TABLE `Table_9938731016696574037` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_9938731016696574037` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_9938731016696574037` FROM `Topic_9938731016696574037` TO `Table_9938731016696574037` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_9938731016696574037` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_9938731016696574037` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, 
`Value2`, `___Value3` FROM `Table_9938731016696574037` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_9938731016696574037`; DDL: DROP TABLE `Table_9938731016696574037` DDL: DROP TOPIC `Topic_9938731016696574037` DDL: CREATE TABLE `Table_4974453384804057942` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_4974453384804057942` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4974453384804057942` FROM `Topic_4974453384804057942` TO `Table_4974453384804057942` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_4974453384804057942` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4974453384804057942` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4974453384804057942` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_4974453384804057942`; DDL: DROP TABLE `Table_4974453384804057942` DDL: DROP TOPIC `Topic_4974453384804057942` DDL: CREATE TABLE `Table_16745369410503579206` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_16745369410503579206` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16745369410503579206` FROM `Topic_16745369410503579206` TO `Table_16745369410503579206` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_16745369410503579206`; DDL: DROP TABLE `Table_16745369410503579206` DDL: DROP TOPIC `Topic_16745369410503579206` DDL: CREATE TABLE `Table_1465162964717814614` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1465162964717814614` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_1465162964717814614` FROM `Topic_1465162964717814614` TO `Table_1465162964717814614` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_1465162964717814614`; DDL: DROP TABLE `Table_1465162964717814614` DDL: DROP TOPIC `Topic_1465162964717814614` DDL: CREATE TABLE `Table_15580259406330763500` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_15580259406330763500` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15580259406330763500` FROM `Topic_15580259406330763500` TO `Table_15580259406330763500` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:17155/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_15580259406330763500` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_15580259406330763500` ORDER BY `Message` Attempt=18 count=1 >>>>> Query: SELECT `Message` FROM `Table_15580259406330763500` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_15580259406330763500`; DDL: DROP TABLE `Table_15580259406330763500` DDL: CREATE TABLE `Table_16741342164062455316` ( ... ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[… the run of '-' characters continues for several more lines; the middle of this test payload is already truncated in the log itself (" ... ") …]
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------' |>; }; return ListMap($lines, $m); }; ; CREATE TRANSFER `Transfer_17283031039225045081` FROM `Topic_17283031039225045081` TO `Table_17283031039225045081` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 1073741824 ); >>>>> Query: SELECT `offset`, `line` FROM `Table_17283031039225045081` ORDER BY `offset`, `line` Attempt=19 count=1802 DDL: DROP TRANSFER `Transfer_17283031039225045081`; DDL: DROP TABLE `Table_17283031039225045081` DDL: DROP TOPIC `Topic_17283031039225045081` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/column_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/column_table/unittest >> Transfer::MessageField_Attributes_Remote [GOOD] >> Transfer::MessageField_Attributes_Local >> Transfer::MessageField_Attributes_Local [GOOD] >> Transfer::MessageField_Partition_Remote >> Transfer::MessageField_Partition_Remote [GOOD] >> Transfer::MessageField_Partition_Local >> Transfer::MessageField_Partition_Local [GOOD] >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/functional/unittest >> Transfer::MessageField_Partition_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_7778757543080945303` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_7778757543080945303` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, 
Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_7778757543080945303` FROM `Topic_7778757543080945303` TO `Table_7778757543080945303` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_7778757543080945303` ORDER BY `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_7778757543080945303`; DDL: DROP TABLE `Table_7778757543080945303` DDL: DROP TOPIC `Topic_7778757543080945303` DDL: CREATE TABLE `Table_9794462612399310386` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_9794462612399310386` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_9794462612399310386` FROM `Topic_9794462612399310386` TO `Table_9794462612399310386` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=13 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=12 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=11 count=0 >>>>> Query: SELECT `Message` FROM `Table_9794462612399310386` ORDER BY `Message` Attempt=10 count=1 DDL: DROP TRANSFER `Transfer_9794462612399310386`; DDL: DROP TABLE `Table_9794462612399310386` DDL: DROP TOPIC `Topic_9794462612399310386` DDL: CREATE TOPIC `Topic_4790294769922094715` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_4790294769922094715` FROM `Topic_4790294769922094715` TO `Table_4790294769922094715` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: [ {
: Error: Executing ESchemeOpCreateTransfer, code: 2003 subissue: {
: Error: Path does not exist, code: 2003 } } {
: Error: Query invalidated on scheme/internal error during Scheme execution, code: 2019 } ] >>>>> EXPECTED: Path does not exist DDL: DROP TOPIC `Topic_4790294769922094715` DDL: CREATE TABLE `Table_18151156170844688127` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_18151156170844688127` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_18151156170844688127 FROM Topic_18151156170844688127 TO Table_18151156170844688127 USING $l WITH ( CONNECTION_STRING = "grp§c://localhost:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_18151156170844688127: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for grp§c://localhost:2135: C-ares status is not ARES_SUCCESS qtype=A name=grp§c://localhost:2135 is_balancer=0: Misformatted domain name } {
: Error: Grpc error response on endpoint grp§c://localhost:2135 } ]) } >>>>> EXPECTED: DNS resolution failed for grp§c://localhost:2135 DDL: DROP TRANSFER `Transfer_18151156170844688127`; DDL: DROP TABLE `Table_18151156170844688127` DDL: DROP TOPIC `Topic_18151156170844688127` DDL: CREATE TABLE `Table_303756923002799120` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_303756923002799120` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_303756923002799120 FROM Topic_303756923002799120 TO Table_303756923002799120 USING $l WITH ( CONNECTION_STRING = "grpc://domain-not-exists-localhost.com.moc:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_303756923002799120: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for domain-not-exists-localhost.com.moc:2135: C-ares status is not ARES_SUCCESS qtype=AAAA name=domain-not-exists-localhost.com.moc is_balancer=0: Domain name not found } {
: Error: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 } ]) } >>>>> EXPECTED: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 DDL: DROP TRANSFER `Transfer_303756923002799120`; DDL: DROP TABLE `Table_303756923002799120` DDL: DROP TOPIC `Topic_303756923002799120` DDL: CREATE USER u21787 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u21787@builtin` DDL: CREATE TABLE `Table_8798969862624551474` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.write', 'ydb.generic.read' ON `/local/Table_8798969862624551474` TO `u21787@builtin` DDL: CREATE TOPIC `Topic_8798969862624551474` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_8798969862624551474` TO `u21787@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8798969862624551474` FROM `Topic_8798969862624551474` TO `Table_8798969862624551474` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); DDL: DROP TOPIC `Topic_8798969862624551474` DDL: DROP TRANSFER `Transfer_8798969862624551474`; DDL: CREATE USER u9839 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u9839@builtin` DDL: CREATE TABLE `Table_8757973940971672634` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.read' ON `/local/Table_8757973940971672634` TO `u9839@builtin` DDL: CREATE TOPIC `Topic_8757973940971672634` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_8757973940971672634` TO `u9839@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8757973940971672634` FROM `Topic_8757973940971672634` TO `Table_8757973940971672634` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Executing ESchemeOpCreateTransfer, code: 2018 subissue: {
: Error: Access denied for scheme request, code: 2018 subissue: {
: Error: Access denied. } } } >>>>> EXPECTED: Access denied for scheme request DDL: DROP TOPIC `Topic_8757973940971672634` DDL: CREATE USER u62094 DDL: CREATE TABLE `Table_13491336256892321552` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13491336256892321552` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT 'ydb.generic.read' ON `/local/ ... BY `Message` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_16237438835767375663`; DDL: DROP TABLE `Table_16237438835767375663` DDL: CREATE TABLE `Table_10124032101020617428` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10124032101020617428` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_10124032101020617428` FROM `Topic_10124032101020617428` TO `Table_10124032101020617428` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10124032101020617428` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10124032101020617428` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_10124032101020617428` ORDER BY `CreateTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_10124032101020617428`; DDL: DROP TABLE `Table_10124032101020617428` DDL: DROP TOPIC `Topic_10124032101020617428` DDL: CREATE TABLE `Table_15926164708998202490` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_15926164708998202490` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_15926164708998202490` FROM `Topic_15926164708998202490` TO `Table_15926164708998202490` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_15926164708998202490` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_15926164708998202490` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_15926164708998202490` ORDER BY `CreateTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_15926164708998202490`; DDL: DROP TABLE `Table_15926164708998202490` DDL: DROP TOPIC `Topic_15926164708998202490` DDL: CREATE TABLE `Table_3520109807070563930` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3520109807070563930` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_3520109807070563930` FROM `Topic_3520109807070563930` TO `Table_3520109807070563930` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_3520109807070563930` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_3520109807070563930` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM 
`Table_3520109807070563930` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_3520109807070563930`; DDL: DROP TABLE `Table_3520109807070563930` DDL: DROP TOPIC `Topic_3520109807070563930` DDL: CREATE TABLE `Table_16029463466276803850` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16029463466276803850` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_16029463466276803850` FROM `Topic_16029463466276803850` TO `Table_16029463466276803850` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_16029463466276803850` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_16029463466276803850` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_16029463466276803850` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16029463466276803850`; DDL: DROP TABLE `Table_16029463466276803850` DDL: DROP TOPIC `Topic_16029463466276803850` DDL: CREATE TABLE `Table_13551730633225550962` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13551730633225550962` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_13551730633225550962` FROM `Topic_13551730633225550962` TO `Table_13551730633225550962` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_13551730633225550962` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_13551730633225550962` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_13551730633225550962` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_13551730633225550962`; DDL: DROP TABLE `Table_13551730633225550962` DDL: DROP TOPIC `Topic_13551730633225550962` DDL: CREATE TABLE `Table_15454957651150607679` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_15454957651150607679` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15454957651150607679` FROM `Topic_15454957651150607679` TO `Table_15454957651150607679` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_15454957651150607679` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_15454957651150607679` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_15454957651150607679` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_15454957651150607679`; DDL: DROP TABLE `Table_15454957651150607679` DDL: DROP TOPIC `Topic_15454957651150607679` DDL: CREATE TABLE `Table_1494567588467447837` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_1494567588467447837` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), 
Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_1494567588467447837` FROM `Topic_1494567588467447837` TO `Table_1494567588467447837` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:10345/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_1494567588467447837` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_1494567588467447837` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_1494567588467447837` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_1494567588467447837`; DDL: DROP TABLE `Table_1494567588467447837` DDL: DROP TOPIC `Topic_1494567588467447837` DDL: CREATE TABLE `Table_16974869809439726317` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16974869809439726317` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16974869809439726317` FROM `Topic_16974869809439726317` TO `Table_16974869809439726317` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_16974869809439726317` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_16974869809439726317` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_16974869809439726317` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16974869809439726317`; DDL: DROP TABLE `Table_16974869809439726317` DDL: DROP TOPIC `Topic_16974869809439726317` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/transfer/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/transfer/tests/py3test >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/result_set_format/tests/py3test >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/result_set_format/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/result_set_format/tests/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 191 ------ [test_disk.py 0/10] chunk ran 1 test (total:65.33s - test:65.23s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.7G (15426200K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 623063 54.9M 54.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 623132 40.7M 23.4M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 623139 798M 800M 717M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 623299 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 623314 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 623446 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 623618 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 623796 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 623979 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 624145 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 624364 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 624639 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:84.03s - test:83.84s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.1G (15795160K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 623410 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 623516 40.6M 23.3M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 623522 787M 776M 707M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 624567 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 624794 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 625091 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 625319 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 625533 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 626016 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 626590 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 627081 1.7G 1.8G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_cms_restart.py 3/10] chunk ran 1 test (total:162.61s - test:162.55s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.5G (17323056K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 909808 54.8M 54.8M 6.7M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 909959 38.0M 19.8M 7.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 909963 820M 823M 741M └─ ydb-tests-functional-cms --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 910753 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 910958 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 911233 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 911470 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 911718 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 912030 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 912268 1.7G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 912637 1.7G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te 942622 1.5G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/te Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/cms/test-results/py3test/testing_out_stuff/stderr ydb/tests/fq/streaming_optimize [size:medium] nchunks:8 ------ [test_sql_negative.py 0/4] chunk ran 1 test (total:45.06s - recipes:1.03s test:41.95s recipes:1.97s) [fail] test_sql_negative.py::test[watermarks-bad_column-default.txt] [default-linux-x86_64-release-asan] (36.68s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise 
self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bqfeoins/topic_3.txt' has failed with code 100. 
E Errors: E ...1fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b63fc81fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b63fc81fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b63fc976cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b63fc976cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b63fc8f6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b63fc8f6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b63fc87ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b63fc81f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b63fc81f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b63fc81f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b63fc9767c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b63fc9765fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b63fcf3e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459624 byte(s) leaked in 8655 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_column-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_negative.py 1/4] chunk ran 1 test (total:38.51s - recipes:1.44s test:36.31s recipes:0.68s) [fail] test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [default-linux-x86_64-release-asan] (31.55s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_o8kxva0o/topic_3.txt' has failed with code 100. E Errors: E ...afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b10edbafe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b10edbafd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b10edd06cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b10edd06cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b10edc86977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b10edc86977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b10edc0ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b10edbaf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b10edbaf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b10edbaf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b10edd067c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b10edd065fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b10ee2ce0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459376 byte(s) leaked in 8650 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_pushdown-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 0/4] chunk ran 8 tests (total:245.95s - recipes:1.00s test:242.75s recipes:0.60s) [fail] test_sql_streaming.py::test[hop-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (34.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cu0clu71/topic_3.txt' has failed with code 100. 
E Errors: E ...afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b8906bafe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b8906bafd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b8906d06cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b8906d06cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b8906c86977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b8906c86977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b8906c0ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b8906baf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b8906baf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b8906baf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b8906d067c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b8906d065fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b89072ce0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459514 byte(s) leaked in 8653 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (29.49s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xhfgdfsi/topic_3.txt' has failed with code 100. E Errors: E ...2fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bfb90f2fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bfb90f2fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bfb91086cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bfb91086cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bfb91006977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bfb91006977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bfb90f8ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bfb90f2f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bfb90f2f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bfb90f2f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bfb910867c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bfb910865fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bfb9164e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459625 byte(s) leaked in 8655 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (29.19s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1_9g95wl/topic_3.txt' has failed with code 100. 
E Errors: E ...efe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b98f65efe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b98f65efd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b98f6746cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b98f6746cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b98f66c6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b98f66c6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b98f664ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b98f65ef36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b98f65ef36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b98f65ef272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b98f67467c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b98f67465fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b98f6d0e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (34.83s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q5owg5_t/topic_3.txt' has failed with code 100. E Errors: E ...3fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b521143fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b521143fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b5211596cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b5211596cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b5211516977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b5211516977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b521149ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b521143f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b521143f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b521143f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b52115967c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b52115965fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b5211b5e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (32.91s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cql_0x5q/topic_3.txt' has failed with code 100. 
E Errors: E ...bfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bfad81bfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bfad81bfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bfad8316cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bfad8316cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bfad8296977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bfad8296977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bfad821ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bfad81bf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bfad81bf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bfad81bf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bfad83167c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bfad83165fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bfad88de0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459380 byte(s) leaked in 8650 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (25.39s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35r5f_bx/topic_3.txt' has failed with code 100. E Errors: E ...5fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b4369c5fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b4369c5fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b4369db6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b4369db6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b4369d36977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b4369d36977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b4369cbab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b4369c5f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b4369c5f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b4369c5f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b4369db67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b4369db65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b436a37e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459615 byte(s) leaked in 8655 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (18.65s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_re_11zp9/topic_3.txt' has failed with code 100. 
E Errors: E ...cfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bb50bdcfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bb50bdcfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bb50bf26cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bb50bf26cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bb50bea6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bb50bea6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bb50be2ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bb50bdcf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bb50bdcf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bb50bdcf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bb50bf267c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bb50bf265fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bb50c4ee0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (29.33s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n3rd_nz7/topic_3.txt' has failed with code 100. E Errors: E ...2fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b48bfe2fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b48bfe2fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b48bff86cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b48bff86cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b48bff06977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b48bff06977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b48bfe8ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b48bfe2f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b48bfe2f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b48bfe2f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b48bff867c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b48bff865fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b48c054e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459281 byte(s) leaked in 8648 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 8 tests (total:234.13s - recipes:0.80s test:231.86s recipes:0.69s) [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (35.74s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0qji8s8v/topic_3.txt' has failed with code 100. 
E Errors: E ...8fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b9724a8fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b9724a8fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b9724be6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b9724be6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b9724b66977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b9724b66977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b9724aeab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b9724a8f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b9724a8f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b9724a8f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b9724be67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b9724be65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b97251ae0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (25.33s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ag09_6aq/topic_3.txt' has failed with code 100. E Errors: E ...7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b9868b7fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b9868b7fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b9868cd6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b9868cd6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b9868c56977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b9868c56977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b9868bdab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b9868b7f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b9868b7f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b9868b7f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b9868cd67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b9868cd65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b986929e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459773 byte(s) leaked in 8658 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (26.27s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__ncnqtpd/topic_3.txt' has failed with code 100. 
E Errors: E ...bfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b7571cbfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b7571cbfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b7571e16cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b7571e16cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b7571d96977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b7571d96977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b7571d1ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b7571cbf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b7571cbf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b7571cbf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b7571e167c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b7571e165fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b75723de0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 454845 byte(s) leaked in 8563 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (30.62s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__kxbnxak/topic_3.txt' has failed with code 100. E Errors: E ...9fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b9f0259fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b9f0259fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b9f026f6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b9f026f6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b9f02676977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b9f02676977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b9f025fab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b9f0259f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b9f0259f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b9f0259f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b9f026f67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b9f026f65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b9f02cbe0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459462 byte(s) leaked in 8652 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (34.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_17kofc4z/topic_3.txt' has failed with code 100. 
E Errors: E ...5fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7be725e5fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7be725e5fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7be725fb6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7be725fb6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7be725f36977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7be725f36977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7be725ebab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7be725e5f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7be725e5f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7be725e5f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7be725fb67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7be725fb65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7be72657e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459681 byte(s) leaked in 8656 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (27.29s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ypwu_q3c/topic_3.txt' has failed with code 100. E Errors: E ...ffe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b95220ffe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b95220ffd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b9522256cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b9522256cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b95221d6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b95221d6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b952215ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b95220ff36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b95220ff36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b95220ff272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b95222567c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b95222565fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b952281e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459771 byte(s) leaked in 8658 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (20.07s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_cii8fgtm/topic_3.txt' has failed with code 100. 
E Errors: E ...7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bdf83c7fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bdf83c7fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bdf83dd6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bdf83dd6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bdf83d56977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bdf83d56977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bdf83cdab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bdf83c7f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bdf83c7f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bdf83c7f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bdf83dd67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bdf83dd65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bdf8439e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (26.28s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hz7xuhzm/topic_3.txt' has failed with code 100. E Errors: E ...5fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7affe385fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7affe385fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7affe39b6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7affe39b6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7affe3936977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7affe3936977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7affe38bab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7affe385f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7affe385f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7affe385f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7affe39b67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7affe39b65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7affe3f7e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 8 tests (total:226.99s - recipes:0.81s test:224.67s recipes:0.97s) [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (30.32s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_746wwi92/topic_3.txt' has failed with code 100. 
E Errors: E ...dfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b1da84dfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b1da84dfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b1da8636cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b1da8636cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b1da85b6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b1da85b6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b1da853ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b1da84df36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b1da84df36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b1da84df272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b1da86367c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b1da86365fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b1da8bfe0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 454845 byte(s) leaked in 8563 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (26.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1itpomqs/topic_3.txt' has failed with code 100. E Errors: E ...afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b28106afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b28106afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b2810806cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b2810806cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b2810786977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b2810786977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b281070ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b28106af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b28106af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b28106af272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b28108067c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b28108065fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b2810dce0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (25.58s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xm1zoc7h/topic_3.txt' has failed with code 100. 
E Errors: E ...0fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bddb130fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bddb130fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bddb1466cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bddb1466cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bddb13e6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bddb13e6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bddb136ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bddb130f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bddb130f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bddb130f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bddb14667c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bddb14665fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bddb1a2e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (34.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tbxkuzd/topic_3.txt' has failed with code 100. E Errors: E ...afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b88e25afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b88e25afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b88e2706cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b88e2706cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b88e2686977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b88e2686977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b88e260ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b88e25af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b88e25af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b88e25af272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b88e27067c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b88e27065fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b88e2cce0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459723 byte(s) leaked in 8657 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (30.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_15sj9q6c/topic_3.txt' has failed with code 100. 
E Errors: E ...7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b85a367fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b85a367fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b85a37d6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b85a37d6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b85a3756977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b85a3756977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b85a36dab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b85a367f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b85a367f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b85a367f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b85a37d67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b85a37d65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b85a3d9e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (28.44s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_c_x75wbn/topic_3.txt' has failed with code 100. E Errors: E ...dfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b620dcdfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b620dcdfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b620de36cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b620de36cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b620ddb6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b620ddb6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b620dd3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b620dcdf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b620dcdf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b620dcdf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b620de367c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b620de365fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b620e3fe0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
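
All of the leak stacks reported in this chunk share the same visible tail: frames #30-#42 run through CPython's import machinery (PyObject_CallMethodObjArgs -> import_find_and_load -> PyImport_ImportModule) and end in init_import_site, i.e. allocations made while importing the site module at interpreter startup, and the leaked totals are nearly identical across tests (roughly 455-460 KB in about 8.6 thousand allocations). That points at one shared startup-time leak rather than per-test regressions. If such a known interpreter-startup leak were to be silenced rather than fixed, LeakSanitizer supports a suppressions file passed via LSAN_OPTIONS; the sketch below shows how a wrapper could apply one when spawning the child process (suppressions= and leak: are standard LSan syntax, but the patterns are examples taken from the frames above, not a vetted list, and whether suppressing is appropriate here is a project decision):

# Sketch only: pass an LSan suppressions file to a child process via LSAN_OPTIONS.
import os
import subprocess
import tempfile

# Illustrative patterns, derived from the stack frames reported above.
SUPPRESSIONS = "leak:init_import_site\nleak:PyImport_ImportModule\n"

def run_with_lsan_suppressions(cmd):
    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
        f.write(SUPPRESSIONS)
        supp_file = f.name
    # Note: this overwrites any LSAN_OPTIONS already set in the environment.
    env = dict(os.environ, LSAN_OPTIONS=f"suppressions={supp_file}")
    return subprocess.run(cmd, env=env, capture_output=True, text=True)
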
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (20.03s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_59_63ajj/topic_3.txt' has failed with code 100. 
E Errors: E ...7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b41c5e7fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b41c5e7fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b41c5fd6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b41c5fd6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b41c5f56977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b41c5f56977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b41c5edab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b41c5e7f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b41c5e7f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b41c5e7f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b41c5fd67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b41c5fd65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b41c659e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (21.59s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zx97z66g/topic_3.txt' has failed with code 100. E Errors: E ...6fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bc5be06fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bc5be06fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bc5be1c6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bc5be1c6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bc5be146977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bc5be146977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bc5be0cab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bc5be06f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bc5be06f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bc5be06f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bc5be1c67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bc5be1c65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bc5be78e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:213.14s - recipes:1.31s test:210.47s recipes:0.65s) [fail] test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (31.87s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yqht2olc/topic_3.txt' has failed with code 100. 
E Errors: E ...cfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b04d28cfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b04d28cfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b04d2a26cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b04d2a26cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b04d29a6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b04d29a6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b04d292ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b04d28cf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b04d28cf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b04d28cf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b04d2a267c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b04d2a265fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b04d2fee0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (28.42s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_41f3eth8/topic_3.txt' has failed with code 100. E Errors: E ...ffe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7ba7765ffe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7ba7765ffd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7ba776756cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7ba776756cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7ba7766d6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7ba7766d6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7ba77665ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7ba7765ff36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7ba7765ff36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7ba7765ff272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7ba7767567c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7ba7767565fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7ba776d1e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (27.54s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ta7bwxp8/topic_3.txt' has failed with code 100. 
E Errors: E ...4fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b599314fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b599314fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b59932a6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b59932a6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b5993226977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b5993226977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b59931aab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b599314f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b599314f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b599314f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b59932a67c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b59932a65fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b599386e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.solomon-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks-default.txt] [default-linux-x86_64-release-asan] (31.48s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_oof3bfo1/topic_3.txt' has failed with code 100. E Errors: E ...0fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b814c60fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b814c60fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b814c766cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b814c766cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b814c6e6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b814c6e6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b814c66ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b814c60f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b814c60f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b814c60f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b814c7667c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b814c7665fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b814cd2e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459509 byte(s) leaked in 8653 allocation(s). 
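
The per-test totals quoted in the SUMMARY lines are the quickest way to tell whether these failures are all the same leak. A small, illustrative triage helper that pulls those numbers out of a log file such as the ones referenced by the Log:/Logsdir: paths above (the regex matches the exact "SUMMARY: AddressSanitizer: N byte(s) leaked in M allocation(s)" format printed here):

# Illustrative helper: collect the "bytes leaked / allocations" pairs from a log
# so near-identical totals (one shared leak) stand out from real regressions.
import re
import sys

PATTERN = re.compile(
    r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
)

def leak_summaries(path):
    with open(path, encoding="utf-8", errors="replace") as log:
        return [(int(b), int(a)) for b, a in PATTERN.findall(log.read())]

if __name__ == "__main__":
    for leaked_bytes, allocations in leak_summaries(sys.argv[1]):
        print(f"{leaked_bytes} bytes leaked in {allocations} allocations")
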
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [default-linux-x86_64-release-asan] (38.22s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_242bqsqa/topic_3.txt' has failed with code 100. 
E Errors: E ...dfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b7ce0cdfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b7ce0cdfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b7ce0e36cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b7ce0e36cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b7ce0db6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b7ce0db6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b7ce0d3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b7ce0cdf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b7ce0cdf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b7ce0cdf272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b7ce0e367c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b7ce0e365fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b7ce13fe0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459726 byte(s) leaked in 8657 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_adjust-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [default-linux-x86_64-release-asan] (25.52s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1dz4hgn9/topic_3.txt' has failed with code 100. E Errors: E ...afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7bb3d19afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7bb3d19afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7bb3d1b06cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7bb3d1b06cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7bb3d1a86977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7bb3d1a86977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7bb3d1a0ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7bb3d19af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7bb3d19af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7bb3d19af272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7bb3d1b067c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7bb3d1b065fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7bb3d20ce0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459620 byte(s) leaked in 8655 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_as-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [default-linux-x86_64-release-asan] (19.38s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n03v9hiu/topic_3.txt' has failed with code 100. 
E Errors: E ...3fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #30 0x7b43a563fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #31 0x7b43a563fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #32 0x7b43a5796cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #33 0x7b43a5796cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #34 0x7b43a5716977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #35 0x7b43a5716977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #36 0x7b43a569ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #37 0x7b43a563f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #38 0x7b43a563f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E #39 0x7b43a563f272 in PyObject_CallFunction /-S/contrib/tools/python3/Objects/call.c:584:14 E #40 0x7b43a57967c4 in PyImport_Import /-S/contrib/tools/python3/Python/import.c:3071:9 E #41 0x7b43a57965fa in PyImport_ImportModule /-S/contrib/tools/python3/Python/import.c:2493:14 E #42 0x7b43a5d5e0c4 in init_import_site /-S/contrib/tools/python3/Python/pylifecycle.c:2317:9 E E SUMMARY: AddressSanitizer: 459822 byte(s) leaked in 8659 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_drop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 33 - FAIL ydb/tests/fq/streaming_optimize ydb/public/lib/ydb_cli/topic/ut [size:medium] nchunks:10 ------ [1/10] chunk ran 3 tests (total:81.83s - test:81.67s) [fail] TTopicReaderTests::TestRun_ReadOneMessage [default-linux-x86_64-release-asan] (22.50s) assertion failed at ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp:176, void NYdb::NConsoleClient::TTopicReaderTests::RunTest(const TVector &, const TVector &, const TString &, TTopicReaderSettings &&): (split.size() == expected.size()) failed: (0 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NYdb::NConsoleClient::TTopicReaderTests::RunTest(TVector>, std::__y1::allocator>>> const&, TVector>, std::__y1::allocator>>> const&, TBasicString> const&, NYdb::NConsoleClient::TTopicReaderSettings&&) at /-S/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp:176:13 ~__hash_table at /-S/contrib/libs/cxxsupp/libcxx/include/__hash_table:1131:35 NYdb::NConsoleClient::TTopicReaderTests::Execute()::TTestRun_ReadOneMessageCaller::X(NYdb::NConsoleClient::TTopicReaderTests*, NUnitTest::TTestContext&) at /-S/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp:12:9 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/testing_out_stuff/TTopicReaderTests.TestRun_ReadOneMessage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/testing_out_stuff/TTopicReaderTests.TestRun_ReadOneMessage.out
------ FAIL: 23 - GOOD, 1 - FAIL ydb/public/lib/ydb_cli/topic/ut
ydb/tests/fq/http_api [size:medium]
------ sole chunk ran 16 tests (total:128.50s - recipes:12.60s test:111.57s recipes:4.20s)
[fail] test_http_api.py::TestHttpApi::test_simple_analytics_query [default-linux-x86_64-release-asan] (19.41s) ydb/tests/fq/http_api/test_http_api.py:81: in test_simple_analytics_query wait_for_query_status(client, query_id, ["COMPLETED"]) library/python/retry/__init__.py:199: in wrapped return _retry(conf, functools.partial(f, *f_args, **f_kwargs)) library/python/retry/__init__.py:224: in _retry return f() ydb/tests/fq/http_api/test_http_api.py:24: in wait_for_query_status raise Exception(f"Status {status} is not in {statuses}") E Exception: Status RUNNING is not in ['COMPLETED']
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/http_api/test-results/py3test/testing_out_stuff/test_http_api.py.TestHttpApi.test_simple_analytics_query.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/http_api/test-results/py3test/testing_out_stuff
------ FAIL: 15 - GOOD, 1 - FAIL ydb/tests/fq/http_api
ydb/public/sdk/cpp/tests/integration/sessions_pool [size:medium] nchunks:10
------ [2/10] chunk ran 1 test (total:24.89s - recipes:9.07s test:11.86s recipes:3.91s)
[fail] YdbSdkSessionsPool::StressTestSync/0 [default-linux-x86_64-release-asan] (10.35s) Value param: 1 ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values: Client->GetCurrentPoolSize() Which is: 0 activeSessionsLimit Which is: 1
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.0.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.0.out
------ FAIL: 11 - GOOD, 1 - FAIL ydb/public/sdk/cpp/tests/integration/sessions_pool
ydb/core/persqueue/pqtablet/partition/mlp/ut [size:medium]
------ sole chunk ran 75 tests (total:605.91s - setup:0.08s test:600.16s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TMLPConsumerTests::RecreateConsumer (timeout) duration: 523.21s
TMLPConsumerTests::ReloadPQTablet (good) duration: 42.48s
TMLPConsumerTests::AlterConsumer (good) duration: 22.68s
72 tests were not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] TMLPConsumerTests::RecreateConsumer [default-linux-x86_64-release-asan] (523.21s) Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/TMLPConsumerTests.RecreateConsumer.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/TMLPConsumerTests.RecreateConsumer.out
------ TIMEOUT: 2 - GOOD, 72 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/persqueue/pqtablet/partition/mlp/ut
ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10
------ [0/10] chunk ran 30 tests (total:240.12s - setup:0.01s test:239.90s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (9.11s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19405 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.61s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3831 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [default-linux-x86_64-release-asan] (9.26s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16360 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [default-linux-x86_64-release-asan] (8.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16813 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [default-linux-x86_64-release-asan] (8.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20442 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20.out ------ [1/10] chunk ran 30 tests (total:240.62s - test:240.40s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (9.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:61627 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [default-linux-x86_64-release-asan] (9.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:32488 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [default-linux-x86_64-release-asan] (10.69s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3603 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64.out ------ [2/10] chunk ran 30 tests (total:246.57s - test:246.48s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [default-linux-x86_64-release-asan] (9.23s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25109 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (9.77s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:6247 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (9.56s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24748 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [default-linux-x86_64-release-asan] (12.57s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24011 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [default-linux-x86_64-release-asan] (9.30s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20973 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (8.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1579 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out ------ [3/10] chunk ran 30 tests (total:241.23s - setup:0.01s test:241.04s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [default-linux-x86_64-release-asan] (8.97s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25025 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (11.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20570 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [default-linux-x86_64-release-asan] (8.03s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4133 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (8.43s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23782 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [default-linux-x86_64-release-asan] (9.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13272 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61.out ------ [4/10] chunk ran 30 tests (total:246.37s - test:246.25s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [default-linux-x86_64-release-asan] (9.51s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27285 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [default-linux-x86_64-release-asan] (9.20s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10672 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [default-linux-x86_64-release-asan] (9.00s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5902 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [default-linux-x86_64-release-asan] (9.25s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64511 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [default-linux-x86_64-release-asan] (10.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19565 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [default-linux-x86_64-release-asan] (10.11s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26040 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [default-linux-x86_64-release-asan] (9.15s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10236 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [default-linux-x86_64-release-asan] (8.65s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:63552 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45.out ------ [5/10] chunk ran 30 tests (total:240.80s - test:240.69s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [default-linux-x86_64-release-asan] (10.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8817 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (8.81s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8582 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [default-linux-x86_64-release-asan] (8.92s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23515 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [default-linux-x86_64-release-asan] (12.40s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28757 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [default-linux-x86_64-release-asan] (9.06s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23561 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [default-linux-x86_64-release-asan] (9.57s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25542 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (8.46s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21811 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [default-linux-x86_64-release-asan] (10.00s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12677 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57.out ------ [6/10] chunk ran 30 tests (total:241.85s - setup:0.01s test:241.63s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [default-linux-x86_64-release-asan] (9.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14596 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [default-linux-x86_64-release-asan] (11.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7323 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [default-linux-x86_64-release-asan] (8.62s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22288 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [default-linux-x86_64-release-asan] (8.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25174 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [default-linux-x86_64-release-asan] (9.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19076 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [default-linux-x86_64-release-asan] (10.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8347 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.out ------ [7/10] chunk ran 30 tests (total:246.83s - test:246.68s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (9.40s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23028 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (10.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10897 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [default-linux-x86_64-release-asan] (10.66s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14691 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [default-linux-x86_64-release-asan] (8.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: No permission to connect to the database, code: 200000 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [default-linux-x86_64-release-asan] (8.30s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1369 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.70s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3735 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [default-linux-x86_64-release-asan] (10.39s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: No permission to connect to the database, code: 200000 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [default-linux-x86_64-release-asan] (8.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25831 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [default-linux-x86_64-release-asan] (8.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28245 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15.out ------ [8/10] chunk ran 30 tests (total:253.08s - setup:0.01s test:252.96s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [default-linux-x86_64-release-asan] (9.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20581 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [default-linux-x86_64-release-asan] (9.09s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14953 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (10.59s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9054 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [default-linux-x86_64-release-asan] (10.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28012 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [default-linux-x86_64-release-asan] (11.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3979 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [default-linux-x86_64-release-asan] (9.64s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19497 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [default-linux-x86_64-release-asan] (10.48s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9318 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (9.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8732 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68.out ------ [9/10] chunk ran 30 tests (total:298.82s - setup:0.04s test:297.85s) [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (14.27s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26102 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (11.56s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4631 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [default-linux-x86_64-release-asan] (9.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21425 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (12.05s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22781 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (13.67s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5308 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [default-linux-x86_64-release-asan] (11.42s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20157 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [default-linux-x86_64-release-asan] (15.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29879 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (12.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23482 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (14.20s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5942 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.out ------ FAIL: 233 - GOOD, 67 - FAIL ydb/core/tx/tx_proxy/ut_schemereq ------ sole chunk ran 2 tests (total:129.04s - setup:0.06s recipes:29.16s test:93.64s recipes:5.97s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.0G (17795408K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1013509 54.9M 54.7M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1013595 42.0M 24.6M 11.9M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1025306 311M 417M 348M │ └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/10 1014986 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1015092 2.1G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1015413 2.2G 2.2G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1015572 2.1G 2.0G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1015934 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1016334 2.1G 1.9G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1016696 2.1G 2.0G 1.6G ├─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1017150 2.1G 2.0G 1.6G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [29/50] chunk ran 3 tests (total:575.19s - test:574.56s) [crashed] KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [default-linux-x86_64-release-asan] (183.45s) Test crashed (return code: 100) ==197200==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 27200 byte(s) in 1 object(s) allocated from: #0 0x00001b87a0fd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021ce721b in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021ce721b in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021ce721b in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021ce721b in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021ce721b in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021ce721b in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021ce721b in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021ce721b in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021ce6a8d in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr con ..[snippet truncated].. 
#17 0x0000215092a8 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #18 0x000021d51f3e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #19 0x0000225f0faf in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #20 0x0000225f0faf in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:212:18 #21 0x00001bbabade in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #22 0x00001bbabade in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #23 0x00001bbabade in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #24 0x00001bbac03c in Execute /-S/util/thread/factory.h:15:13 #25 0x00001bbac03c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #26 0x00001bba0024 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #27 0x00001b83dc16 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 429042 byte(s) leaked in 5259 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.ConflictWrite.IsOlap.FillTables.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.ConflictWrite.IsOlap.FillTables.out ------ FAIL: 167 - GOOD, 1 - CRASHED ydb/core/kqp/ut/tx ydb/core/viewer/tests [size:medium] ------ sole chunk ran 51 tests (total:131.82s - setup:0.29s test:129.68s canon:0.22s) [fail] test.py::TestViewer::test_viewer_tenantinfo [default-linux-x86_64-release-asan] (0.03s) Test results differ from canonical: test_result['TenantInfo'][1]['Id']: - 6 + 42 test_result['TenantInfo'][2]['Id']: - 8 + 44 test_result['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_tenantinfo_db [default-linux-x86_64-release-asan] (0.14s) Test results differ from canonical: test_result['/Root/dedicated_db']['TenantInfo'][0]['Id']: - 6 + 42 test_result['/Root/serverless_db']['TenantInfo'][0]['Id']: - 8 + 44 test_result['/Root/serverless_db']['TenantInfo'][0]['ResourceId']: - 7 + 43 test_result['/Root/shared_db']['TenantInfo'][0]['Id']: - 7 + 43 test_result['no-database']['TenantInfo'][1]['Id']: - 6 + 42 test_result['no-database']['TenantInfo'][2]['Id']: - 8 + 44 test_result['no-database']['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['no-database']['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo_db.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] 
test.py::TestViewer::test_scheme_directory [default-linux-x86_64-release-asan] (0.43s) Test results differ from canonical: test_result['1-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['1-get']['self']['owner']: - root + user1 test_result['3-get']['children'][4]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['3-get']['self']['owner']: - root + user1 test_result['5-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':... test_result['5-get']['self']['owner']: - root + user1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_scheme_directory.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff ------ FAIL: 48 - GOOD, 3 - FAIL ydb/core/viewer/tests ydb/tests/stress/olap_workload/tests [size:medium] ------ sole chunk ran 1 test (total:228.82s - setup:0.01s test:228.41s) [fail] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (216.71s) ydb/tests/stress/olap_workload/tests/test_workload.py:27: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/olap_workload --endpoint grpc://localhost:25122 --database /Root --duration 120' has failed with code 1. E Errors: E ...c E for next_opt in opt_generator: E ^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/retries.py", line 98, in retry_operation_impl E result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) E ^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/pool.py", line 250, in wrapped_callee E return [result_set for result_set in it] E ^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ E return self._next() E ^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next E res = self.wrapper(next(self.it)) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/session.py", line 390, in E lambda resp: base.wrap_execute_query_response( E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 211, in decorator E return func(rpc_state, response_pb, session_state, *args, **kwargs) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 229, in wrap_execute_query_response E issues._process_response(response_pb) E File "contrib/python/ydb/py3/ydb/issues.py", line 237, in _process_response E raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Timeout: message: "Query compilation timed out." 
severity: 1 (server_code: 400090) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/olap_workload/tests ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [43/50] chunk ran 4 tests (total:97.63s - test:97.59s) [fail] KqpStats::OneShardNonLocalExec+UseSink [default-linux-x86_64-release-asan] (54.85s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:714, auto NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseOneShardNonLocalExec::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(size_t) [UseSink = true]: (done) unable to wait tablets move on specific node 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BFA12CB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C49A1AB 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:714: operator() @ 0x1BB8EBDC 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:778: Execute_ @ 0x1BB78B4E 4. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1BB27627 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB27627 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1BB27627 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1BB27627 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1BB27627 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C4D2E19 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C4D2E19 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C4D2E19 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C4A0E87 13. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1BB2691E 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C4A263F 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C4CCC7C 16. ??:0: ?? @ 0x7F132B2EFD8F 17. ??:0: ?? @ 0x7F132B2EFE3F 18. ??:0: ?? 
@ 0x18BC4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.OneShardNonLocalExec.UseSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.OneShardNonLocalExec.UseSink.out ------ FAIL: 233 - GOOD, 1 - FAIL ydb/core/kqp/ut/query ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [36/40] chunk ran 2 tests (total:113.23s - test:113.17s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (80.20s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 79 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ydb/core/quoter/ut [size:medium] ------ sole chunk ran 38 tests (total:341.02s - setup:0.12s test:340.07s) [fail] QuoterWithKesusTest::PrefetchCoefficient [default-linux-x86_64-release-asan] (13.41s) assertion failed at ydb/core/quoter/ut_helpers.cpp:122, void NKikimr::TKesusQuoterTestSetup::GetQuota(const std::vector> &, TEvQuota::EResourceOperator, TDuration, TEvQuota::TEvClearance::EResult): (answer->Result == expectedResult) failed: (Success != Deadline) , with diff: (Succ|D)e(ss|adline) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NKikimr::TKesusQuoterTestSetup::GetQuota(std::__y1::vector>, TBasicString>, unsigned long>, std::__y1::allocator>, TBasicString>, unsigned long>>> const&, NKikimr::TEvQuota::EResourceOperator, TDuration, NKikimr::TEvQuota::TEvClearance::EResult) at /-S/ydb/core/quoter/ut_helpers.cpp:122:5 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:546:18 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/quoter/kesus_quoter_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/quoter/ut/test-results/unittest/testing_out_stuff/QuoterWithKesusTest.PrefetchCoefficient.out ------ FAIL: 37 - GOOD, 1 - FAIL ydb/core/quoter/ut ------ sole chunk ran 2 tests (total:574.41s - recipes:14.20s test:557.00s recipes:2.72s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.5G (18367224K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 958982 54.8M 54.5M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 959026 41.6M 24.1M 11.5M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 961786 58.6M 57.9M 32.5M │ └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args 962031 2.1G 2.1G 2.1G │ └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/y 959240 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959247 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959483 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 959693 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 960025 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 960188 1.9G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 960351 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s 960517 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr ydb/core/transfer/ut/functional [size:medium] ------ sole chunk ran 58 tests 
(total:317.44s - setup:0.02s recipes:28.41s test:286.34s recipes:2.43s) [fail] Transfer::LocalTopic_BigMessage [default-linux-x86_64-release-asan] (26.82s) assertion failed at ydb/core/transfer/ut/common/utils.h:836, void NReplicationTest::MainTestCase::CheckResult(const std::string &, const TExpectations &): (false) Unable to wait transfer result TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NReplicationTest::MainTestCase::CheckResult(std::__y1::basic_string, std::__y1::allocator> const&, TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&) at /-S/ydb/core/transfer/ut/common/utils.h:0:9 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:546:18 operator() at /-S/ydb/core/transfer/ut/functional/transfer_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff/Transfer.LocalTopic_BigMessage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff/Transfer.LocalTopic_BigMessage.out ------ FAIL: 57 - GOOD, 1 - FAIL ydb/core/transfer/ut/functional ydb/core/kqp/workload_service/ut [size:medium] nchunks:10 ------ [6/10] chunk ran 6 tests (total:145.70s - test:145.56s) [fail] ResourcePoolClassifiersDdl::TestDropResourcePool [default-linux-x86_64-release-asan] (63.28s) assertion failed at ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:778, static void NKikimr::NKqp::NWorkload::IYdbSetup::WaitFor(TDuration, TString, std::function): (false) Waiting Resource pool classifier success timeout. Spent time 20.311962s exceeds limit 20.000000s 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B81ECEB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BD0FD0B 2. /tmp//-S/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:778: WaitFor @ 0x38D37112 3. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:828: WaitForSuccess @ 0x1B38C2D6 4. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:911: Execute_ @ 0x1B391918 5. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756: operator() @ 0x1B48CAF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756:1) &> @ 0x1B48CAF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756:1) &> @ 0x1B48CAF7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B48CAF7 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B48CAF7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BD3F149 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BD3F149 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BD3F149 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BD169E7 14. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:756: Execute @ 0x1B48BE33 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BD1819F 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BD38FAC 17. ??:0: ?? @ 0x7FF058636D8F 18. ??:0: ?? @ 0x7FF058636E3F 19. ??:0: ?? @ 0x18A2E028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/ResourcePoolClassifiersDdl.TestDropResourcePool.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/ResourcePoolClassifiersDdl.TestDropResourcePool.out ------ FAIL: 61 - GOOD, 1 - FAIL ydb/core/kqp/workload_service/ut ydb/tests/stress/kafka/tests [size:medium] ------ sole chunk ran 1 test (total:193.58s - setup:0.03s test:193.37s) [fail] test_kafka_streams.py::TestYdbTopicWorkload::test [default-linux-x86_64-release-asan] (184.87s) ydb/tests/stress/kafka/tests/test_kafka_streams.py:24: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/kafka_streams_test --endpoint grpc://localhost:24044 --database /Root --bootstrap http://localhost:24244 --source-path test-topic --target-path target-topic --consumer workload-consumer-0 --num-workers 2 --duration 120' has failed with code 255. 
E Errors: E ...nder.handleProduceResponse(Sender.java:570) E at org.apache.kafka.clients.producer.internals.Sender.lambda$sendProduceRequest$5(Sender.java:844) E at org.apache.kafka.clients.ClientResponse.onComplete(ClientResponse.java:109) E at org.apache.kafka.clients.NetworkClient.completeResponses(NetworkClient.java:583) E at org.apache.kafka.clients.NetworkClient.poll(NetworkClient.java:575) E at org.apache.kafka.clients.producer.internals.Sender.runOnce(Sender.java:328) E at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:243) E at java.base/java.lang.Thread.run(Thread.java:840) E Caused by: org.apache.kafka.common.errors.UnknownServerException: Unexpected termination E Traceback (most recent call last): E File "contrib/tools/python3/Lib/runpy.py", line 198, in _run_module_as_main E return _run_code(code, main_globals, None, E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/tools/python3/Lib/runpy.py", line 88, in _run_code E exec(code, run_globals) E File "ydb/tests/stress/kafka/__main__.py", line 27, in E workload.loop() E File "ydb/tests/stress/kafka/workload/__init__.py", line 125, in loop E assert totalMessCountTest == totalMessCountTarget, f"Source and target {self.target_topic_path}-{i} topics total messages count are not equal:" + \ E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E AssertionError: Source and target target-topic-0 topics total messages count are not equal:{totalMessCountTest} and {totalMessCountTarget} respectively. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff/test_kafka_streams.py.TestYdbTopicWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/kafka/tests ydb/tests/functional/dstool [size:medium] ------ sole chunk ran 4 tests (total:204.64s - test:204.51s canon:0.03s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.0G (14647132K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 760907 54.8M 54.8M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 761060 40.8M 23.5M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 761068 819M 817M 743M └─ ydb-tests-functional-dstool --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do 824301 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 824602 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837698 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 825073 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837823 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 825497 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837809 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 826015 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837744 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 826418 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837738 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 826836 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837695 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st 827247 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff 837765 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_st Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/run_test.log Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/stderr [fail] test_canonical_requests.py::Test::test_essential [default-linux-x86_64-release-asan] (104.46s) ydb/tests/functional/dstool/test_canonical_requests.py:152: in test_essential self._wait_pdisk_metrics_collected() ydb/tests/functional/dstool/test_canonical_requests.py:86: in _wait_pdisk_metrics_collected wait_for_and_assert(all_pdisk_metrics_collected, is_(True), message='All pdisk metrics collected') ydb/tests/library/common/wait_for.py:73: in wait_for_and_assert assert_that(result, matcher, message) E AssertionError: All pdisk metrics collected E Expected: E but: was Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/test_canonical_requests.py.Test.test_essential.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff ------ FAIL: 3 - GOOD, 1 - FAIL ydb/tests/functional/dstool ydb/core/kqp/ut/pg [size:medium] nchunks:10 ------ [0/10] chunk ran 12 tests (total:175.11s - setup:0.01s test:174.96s) [fail] KqpPg::AlterColumnSetDefaultFromSequence [default-linux-x86_64-release-asan] (8.22s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequence.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequence.out ------ [8/10] chunk ran 11 tests (total:423.60s - test:423.46s) [fail] KqpPg::TempTablesDrop [default-linux-x86_64-release-asan] (9.71s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.out [fail] KqpPg::TempTablesWithCache [default-linux-x86_64-release-asan] (9.30s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.out ------ [9/10] chunk ran 11 tests (total:344.25s - test:344.18s) [fail] PgCatalog::CheckSetConfig [default-linux-x86_64-release-asan] (8.57s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.out ------ FAIL: 110 - GOOD, 4 - FAIL ydb/core/kqp/ut/pg ydb/core/statistics/service/ut [size:medium] nchunks:60 ------ [15/60] chunk ran 1 test (total:604.45s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::CountMinSketchServerlessStatistics (timeout) duration: 602.42s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::CountMinSketchServerlessStatistics [default-linux-x86_64-release-asan] (602.42s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.out ------ [16/60] chunk ran 1 test (total:606.53s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::CountMinSketchStatistics (timeout) duration: 604.50s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::CountMinSketchStatistics [default-linux-x86_64-release-asan] (604.50s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchStatistics.out ------ [17/60] chunk ran 1 test (total:605.44s - setup:0.01s test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::EqWidthHistogram (timeout) duration: 603.09s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::EqWidthHistogram [default-linux-x86_64-release-asan] (603.09s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.out ------ [19/60] chunk ran 1 test (total:605.28s - test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::SimpleColumnStatistics (timeout) duration: 603.29s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::SimpleColumnStatistics [default-linux-x86_64-release-asan] (603.29s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.SimpleColumnStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.SimpleColumnStatistics.out ------ TIMEOUT: 23 - GOOD, 4 - TIMEOUT ydb/core/statistics/service/ut ydb/core/cms/ut_sentinel_unstable [size:medium] ------ sole chunk ran 1 test (total:629.24s - setup:0.03s test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TSentinelUnstableTests::BSControllerCantChangeStatus (timeout) duration: 606.03s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/stderr [timeout] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (606.03s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.out ------ TIMEOUT: 1 - TIMEOUT ydb/core/cms/ut_sentinel_unstable ------ sole chunk ran 1 test (total:132.56s - test:132.32s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.6G (16317488K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 762007 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 762298 40.9M 23.6M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 762333 745M 731M 666M └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini - 765905 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 766850 1.9G 2.0G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 767844 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 768611 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 769462 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 770309 1.9G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 771139 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 772067 1.7G 1.8G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ------ [test_discovery.py] chunk ran 3 tests (total:155.07s - test:155.02s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 15.9G (16666944K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 846908 54.9M 54.9M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 846930 40.6M 23.4M 10.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 846933 838M 845M 763M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 877494 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 878097 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 878961 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 879318 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 880040 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 880706 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 883191 1.6G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 883198 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 883317 1.5G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ydb/core/persqueue/public/describer/ut ------ sole chunk ran 4 tests (total:67.96s - test:60.04s) Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: TDescriberTests::TopicExists (good) duration: 27.22s TDescriberTests::TopicNotExists (good) duration: 21.30s TDescriberTests::TopicNotTopic (timeout) duration: 17.53s TDescriberTests::CDC test was not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/stderr [timeout] TDescriberTests::TopicNotTopic [default-linux-x86_64-release-asan] (17.53s) Killed by timeout (60 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/TDescriberTests.TopicNotTopic.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/TDescriberTests.TopicNotTopic.out ------ TIMEOUT: 2 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/persqueue/public/describer/ut ydb/tests/functional/hive [size:medium] nchunks:80 ------ [test_drain.py 0/20] chunk ran 1 test (total:82.13s - setup:0.04s test:81.91s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (71.52s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 46 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037889: 6) (72075186224038606: 6) (72075186224038622: 6) (72075186224038638: 6) (72075186224038653: 4) (72075186224038654: 6) (72075186224038660: 6) (72075186224038661: 4) (72075186224038710: 6) (72075186224038724: 4). Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive ------ [0/10] chunk ran 2 tests (total:479.71s - test:479.44s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 20.0G (20929232K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 536143 54.8M 54.6M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 536203 39.5M 22.0M 9.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 536206 1.0G 1.0G 966M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 539198 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 539754 2.2G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 540368 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 540728 2.3G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 541146 2.1G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 541541 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 541870 2.2G 1.9G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 542287 2.3G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 542572 2.3G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [2/10] chunk ran 2 tests (total:370.71s - setup:0.01s test:369.80s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 21.8G (22891620K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 46947 54.9M 54.4M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 47038 41.0M 23.7M 10.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 47054 866M 862M 834M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 50084 2.4G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122260 2.4G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 50338 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122220 2.2G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 50731 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 50957 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122322 2.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 51255 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122363 2.2G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 51590 2.2G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122352 2.2G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 52037 2.2G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122221 2.2G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 52579 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122278 2.1G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 52938 2.4G 0b 0b └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 122259 2.4G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [3/10] chunk ran 2 tests (total:205.76s - test:205.09s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 16.8G (17633560K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 749025 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749049 40.4M 23.1M 10.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749051 975M 965M 878M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 750393 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836199 1.9G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 751444 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836202 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 752383 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836210 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 752906 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836197 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 753451 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836141 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 754190 1.7G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 836188 1.7G 0b 0b └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [4/10] chunk ran 2 tests (total:144.49s - test:144.30s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.7G (18551648K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 749720 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749740 40.3M 22.9M 10.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749744 910M 917M 818M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 751861 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 752394 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 752993 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 753649 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 754243 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 754763 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 755330 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 755743 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 756029 1.8G 1.8G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [5/10] chunk ran 8 tests (total:101.25s - test:101.00s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.6G (18466584K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 749027 54.8M 54.8M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749052 40.5M 23.2M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 749054 801M 806M 706M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 750700 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 792341 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 751314 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 751723 1.6G 1.7G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 792368 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 752239 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 752817 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 792150 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 753321 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 754099 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 792403 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 754697 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 792261 1.8G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ydb/tests/datashard/async_replication [size:medium] nchunks:20 ------ [0/20] chunk ran 2 tests (total:601.03s - test:600.16s) Chunk 
exceeded 600s timeout and was killed List of the tests involved in the launch: test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] (good) duration: 367.79s test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] (good) duration: 227.82s Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/async_replication/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/stderr ------ [6/20] chunk ran 1 test (total:141.92s - test:141.84s) [fail] test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [default-linux-x86_64-release-asan] (133.96s) ydb/tests/datashard/async_replication/test_async_replication.py:91: in test_async_replication self.do_test_async_replication(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/async_replication/test_async_replication.py:31: in do_test_async_replication assert wait_for(self.create_predicate(False, table_name, dml_cluster_2.query), timeout_seconds=100), "Expected non-zero rows after insert" ydb/tests/library/common/wait_for.py:16: in wait_for if predicate(): ydb/tests/datashard/async_replication/test_async_replication.py:45: in predicate rows = dml_cluster( ydb/tests/datashard/lib/dml_operations.py:15: in query return self.query_object.query(text) ydb/tests/sql/lib/test_query.py:35: in query result_sets = self.pool.execute_with_retries(text, parameters=parameters, retry_settings=retry_settings) contrib/python/ydb/py3/ydb/query/pool.py:252: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:137: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:98: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:250: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:390: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:211: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:229: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/table_index_4__SYNC]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff/test_async_replication.py.TestAsyncReplication.test_async_replication.table_index_4__SYNC-pk_types0-all_types0-index0---SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/async_replication/test-results/py3test/testing_out_stuff ------ TIMEOUT: 20 - GOOD, 1 - FAIL ydb/tests/datashard/async_replication ydb/core/tx/datashard/build_index/ut [size:medium] ------ sole chunk ran 50 tests (total:611.08s - setup:0.10s test:600.18s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TTxDataShardPrefixKMeansScan::BuildToPosting (good) duration: 36.06s TTxDataShardPrefixKMeansScan::BuildToBuild (good) duration: 32.70s TTxDataShardBuildIndexScan::ShadowBorrowCompaction (good) duration: 29.76s TTxDataShardBuildIndexScan::BadRequest (good) duration: 28.71s TTxDataShardLocalKMeansScan::BuildToBuild_Ranges (good) duration: 26.86s TTxDataShardRecomputeKMeansScan::MainTable (good) duration: 18.55s TTxDataShardLocalKMeansScan::BuildToPosting (good) duration: 18.32s TTxDataShardFilterKMeansScan::BuildToPosting (good) duration: 17.35s TTxDataShardFilterKMeansScan::BuildToBuild (good) duration: 16.70s TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap (good) duration: 16.44s 34 more tests with 354.21s total duration are not listed. 6 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/stderr [timeout] TTxDataShardReshuffleKMeansScan::BuildToBuild [default-linux-x86_64-release-asan] (14.54s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/TTxDataShardReshuffleKMeansScan.BuildToBuild.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/build_index/ut/test-results/unittest/testing_out_stuff/TTxDataShardReshuffleKMeansScan.BuildToBuild.out ------ TIMEOUT: 43 - GOOD, 6 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/tx/datashard/build_index/ut ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 3/10] chunk ran 5 tests (total:641.89s - setup:0.05s test:600.11s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 199.76s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (good) duration: 164.84s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 161.96s 
test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (timeout) duration: 92.43s test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (good) duration: 10.02s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (92.43s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_column_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 4/10] chunk ran 4 tests (total:464.73s - test:462.37s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (59.47s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (117.70s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception
contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'
E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (101.60s)
ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception
contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'
E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (174.88s)
ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info)
:4: in raise_exc_info ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception
contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'
E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ [test_serverless.py 9/10] chunk ran 4 tests (total:631.70s - setup:0.01s test:600.04s)
Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 229.71s
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 207.49s
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (timeout) duration: 190.04s
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr
[timeout] test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (190.04s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ TIMEOUT: 37 - GOOD, 4 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/functional/serverless
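The test_database_with_disk_quotas failures above all break the same way: the expected ydb.issues.Unavailable exception is raised, but pytest.raises(..., match=...) runs re.search over the exception text, and that text now says "Disk space exhausted" / "out of disk space" instead of containing the literal DATABASE_DISK_SPACE_QUOTA_EXCEEDED status the pattern expects. The sketch below reproduces the mechanism with plain pytest; DummyUnavailable and the broadened pattern are illustrative assumptions, not the project's actual exception class or fix.

    # Minimal sketch of the pytest.raises(match=...) behaviour seen above.
    import re
    import pytest

    class DummyUnavailable(Exception):
        """Stand-in for ydb.issues.Unavailable (assumption, not the real SDK class)."""

    SERVER_MESSAGE = (
        'message: "Disk space exhausted." '
        'issues { message: "Cannot perform writes: database is out of disk space" } '
        '(server_code: 400050)'
    )

    def write_key():
        raise DummyUnavailable(SERVER_MESSAGE)

    def test_quota_error_is_reported():
        # The old status name no longer appears in the message, so re.search finds nothing,
        # which is exactly why the assertions at test_serverless.py:457 fail:
        assert re.search(r'DATABASE_DISK_SPACE_QUOTA_EXCEEDED', SERVER_MESSAGE) is None
        # A broader pattern (an assumption about intent, not the project's actual change)
        # still pins the failure to the disk-quota condition while tolerating the new wording:
        with pytest.raises(DummyUnavailable, match=r'DATABASE_DISK_SPACE_QUOTA_EXCEEDED|out of disk space'):
            write_key()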
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [4/60] chunk ran 1 test (total:605.98s - test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
AnalyzeColumnshard::AnalyzeMultiOperationId (timeout) duration: 604.53s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] AnalyzeColumnshard::AnalyzeMultiOperationId [default-linux-x86_64-release-asan] (604.53s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.out
------ TIMEOUT: 31 - GOOD, 1 - TIMEOUT ydb/core/statistics/aggregator/ut
ydb/core/security/ldap_auth_provider/ut [size:medium] nchunks:10
------ [8/10] chunk ran 6 tests (total:37.62s - test:37.54s)
[crashed] LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==127744==ERROR: AddressSanitizer: stack-use-after-scope on address 0x7be4c7f194d8 at pc 0x00004df9418b bp 0x7be474633110 sp 0x7be474633108
READ of size 8 at 0x7be4c7f194d8 thread T129 (ydb-core-securi)
2026-01-10T12:08:38.684407Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1
2026-01-10T12:08:42.861943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs
2026-01-10T12:08:42.862005Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded
#0 0x00004df9418a in begin /-S/contrib/libs/cxxsupp/libcxx/include/vector:1461:28
#1 0x00004df9418a in LdapMock::TLdapRequestProcessor::ProcessSearchRequest(std::__y1::vector, std::__y1::allocator>> const&) /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_message_processor.cpp:330:44
#2 0x00004df8cfc0 in LdapMock::TLdapRequestProcessor::Process(LdapMock::TLdapMockResponses const&) /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_message_processor.cpp:141:20
#3 0x00004df8b010 in HandleLdapMessage /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_mock.cpp:23:94
#4 0x00004df8b010 in LdapMock::LdapRequestHandler(TSharedPtr, LdapMock::TLdapMockResponses const&) /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_mock.cpp:30:34
#5 0x00004df8ab1d in operator() /-S/ydb/library/testlib/service_mocks/ldap_mock/ldap_simple_server.cpp:59:29
#6 0x00004df8ab1d in __invoke<(lambda at /-S/ydb/library/testl ..[snippet truncated].. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12
#20 0x00001adf16c7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10
#21 0x00001b6bbec9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12
#22 0x00001b6bbec9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10
#23 0x00001b6bbec9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20
#24 0x00001b689f17 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18
#25 0x00001adf0a03 in NKikimr::NTestSuiteLdapAuthProviderTest_nonSecure::TCurrentTest::Execute() /-S/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp:1443:1
#26 0x00001b68b6cf in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19
#27 0x00001b6b5d2c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44
#28 0x7fe4c9a29d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7)
==127744==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/testing_out_stuff/LdapAuthProviderTest_nonSecure.LdapRefreshGroupsInfoWithError.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/testing_out_stuff/LdapAuthProviderTest_nonSecure.LdapRefreshGroupsInfoWithError.out
------ FAIL: 61 - GOOD, 1 - CRASHED ydb/core/security/ldap_auth_provider/ut
ydb/tests/functional/audit [size:medium] nchunks:500
------ [test_canonical_records.py 5/100] chunk ran 1 test (total:90.45s - setup:0.01s test:90.37s)
[fail] test_canonical_records.py::test_dstool_evict_vdisk_grpc [default-linux-x86_64-release-asan] (84.69s)
ydb/tests/functional/audit/test_canonical_records.py:348: in test_dstool_evict_vdisk_grpc execute_dstool_grpc(ydb_cluster, TOKEN, ['vdisk', 'evict', '--vdisk-ids', vdisk_id, '--ignore-degraded-group-check', '--ignore-failure-model-group-check'])
ydb/tests/functional/audit/helpers.py:60: in execute_dstool_grpc assert False, f'Command\n{full_cmd}\n finished with exit code {proc_result.exit_code}, stderr:\n\n{proc_result.std_err.decode("utf-8")}\n\nstdout:\n{proc_result.std_out.decode("utf-8")}'
E AssertionError: Command
E ['/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/dstool/ydb-dstool', '--endpoint', 'grpc://localhost:6823', 'vdisk', 'evict', '--vdisk-ids', '[82000000:1:0:0:0]', '--ignore-degraded-group-check', '--ignore-failure-model-group-check']
E finished with exit code 1, stderr:
E
E error, add --verbose for more info
E
E
E stdout:
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.py.test_dstool_evict_vdisk_grpc.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff
------ FAIL: 33 - GOOD, 1 - FAIL ydb/tests/functional/audit
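The dstool failure above is hard to act on because the CLI prints only "error, add --verbose for more info" before the helper asserts. One way to make such failures self-describing in CI is to rerun the command with --verbose purely for diagnostics when the first run fails. The wrapper below is a sketch using only the standard library; the function name and retry policy are assumptions, not the suite's execute_dstool_grpc helper.

    # Hypothetical wrapper: run a CLI once, and on failure rerun it with --verbose so the
    # CI log carries the detailed error instead of the one-line hint seen above.
    import subprocess
    from typing import List

    def run_with_verbose_on_failure(cmd: List[str]) -> subprocess.CompletedProcess:
        result = subprocess.run(cmd, capture_output=True, text=True)
        if result.returncode != 0 and "--verbose" not in cmd:
            # Rerun with --verbose only to collect diagnostics; keep the original result for asserts.
            verbose = subprocess.run(cmd + ["--verbose"], capture_output=True, text=True)
            print("command failed, verbose stderr follows:\n", verbose.stderr)
        return result

    # Usage sketch (endpoint and vdisk id are placeholders taken from the log above):
    # res = run_with_verbose_on_failure(["ydb-dstool", "--endpoint", "grpc://localhost:6823",
    #                                    "vdisk", "evict", "--vdisk-ids", "[82000000:1:0:0:0]"])
    # assert res.returncode == 0, res.stderr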
ydb/library/yql/providers/solomon/actors/ut [size:medium]
------ sole chunk ran 6 tests (total:87.15s - setup:0.12s recipes:4.53s test:81.67s recipes:0.45s)
[fail] TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [default-linux-x86_64-release-asan] (16.73s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff/TDqSolomonWriteActorTest.TestWriteBigBatchSolomon.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff/TDqSolomonWriteActorTest.TestWriteBigBatchSolomon.out
[fail] TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [default-linux-x86_64-release-asan] (16.98s)
(TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff/TDqSolomonWriteActorTest.TestWriteBigBatchMonitoring.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/providers/solomon/actors/ut/test-results/unittest/testing_out_stuff/TDqSolomonWriteActorTest.TestWriteBigBatchMonitoring.out
------ FAIL: 4 - GOOD, 2 - FAIL ydb/library/yql/providers/solomon/actors/ut
ydb/tests/olap/column_family/compression [size:medium] nchunks:10
------ [1/10] chunk ran 3 tests (total:383.67s - test:383.57s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [default-linux-x86_64-release-asan] (177.27s)
ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015628 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_12_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.12.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ [2/10] chunk ran 3 tests (total:417.63s - test:417.55s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [default-linux-x86_64-release-asan] (75.98s)
ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015728 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_16_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.16.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ [3/10] chunk ran 2 tests (total:84.04s - setup:0.01s test:83.93s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [default-linux-x86_64-release-asan] (76.33s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015796 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_18_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.18.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [default-linux-x86_64-release-asan] (0.01s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015796 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_19_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.19.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ [6/10] chunk ran 2 tests (total:238.84s - test:238.79s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [default-linux-x86_64-release-asan] (140.31s)
ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015444 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_4_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.4.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ [7/10] chunk ran 2 tests (total:81.66s - setup:0.01s test:81.57s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [default-linux-x86_64-release-asan] (74.49s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1031249 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_6_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.6.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [default-linux-x86_64-release-asan] (0.01s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1031249 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_7_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.7.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ [8/10] chunk ran 2 tests (total:84.03s - test:83.95s)
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [default-linux-x86_64-release-asan] (76.19s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015642 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_8_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.8.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
[fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [default-linux-x86_64-release-asan] (0.01s)
setup failed: ydb/tests/olap/column_family/compression/alter_compression.py:133: in setup_class cls.create_table_without_compression()
ydb/tests/olap/column_family/compression/alter_compression.py:168: in create_table_without_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8
E assert 1015642 == (8000000 // 8)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_9_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.9.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ FAIL: 14 - GOOD, 9 - FAIL ydb/tests/olap/column_family/compression
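All nine compression failures above trip the same exact-count assertion: get_portion_stat_by_tier()['__DEFAULT']['Rows'] is expected to equal 8000000 // 8, while the observed counts (1015444 through 1031249) sit 1.5-3% above one million. If the intent of the check is only that the data landed in the default tier in roughly the expected volume, a tolerance-based comparison states that directly. The sketch below uses pytest.approx; the tolerance and the stand-in portion_stats dict are assumptions, not the project's actual fix.

    # Hedged sketch: compare the observed row count against the expectation with a
    # relative tolerance instead of strict equality. `portion_stats` stands in for the
    # dict returned by table.get_portion_stat_by_tier() in alter_compression.py.
    import pytest

    EXPECTED_RAW = 8_000_000

    def check_default_tier_rows(portion_stats: dict, rel_tolerance: float = 0.05) -> None:
        rows = portion_stats['__DEFAULT']['Rows']
        # pytest.approx(expected, rel=...) accepts any value within the relative tolerance.
        assert rows == pytest.approx(EXPECTED_RAW // 8, rel=rel_tolerance), (
            f"unexpected row count in __DEFAULT tier: {rows}"
        )

    # Example with one of the observed values from the log above:
    check_default_tier_rows({'__DEFAULT': {'Rows': 1_015_628}})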
ydb/tests/functional/statistics [size:medium] nchunks:10
------ [test_restarts.py 0/10] chunk ran 1 test (total:143.39s - setup:0.02s test:143.31s)
[fail] test_restarts.py::test_basic [default-linux-x86_64-release-asan] (137.38s)
ydb/tests/functional/statistics/test_restarts.py:95: in test_basic assert_that(wait_for(get_base_stats_response, timeout_seconds=5),
E AssertionError: base stats available after restart
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff/test_restarts.py.test_basic.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/functional/statistics
------ [1/10] chunk ran 1 test (total:494.22s - setup:0.19s test:493.78s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.7G (14342600K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
10219 54.8M 54.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
10283 41.6M 24.3M 11.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
10308 58.6M 58.6M 32.5M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing
10461 9.6G 9.6G 9.4G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr
ydb/tests/datashard/ttl [size:medium] nchunks:36
------ [10/36] chunk ran 2 tests (total:392.37s - test:392.30s)
[fail] test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (283.38s)
ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync)
ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml)
ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml)
ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert (
E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Datetime_1_UNIQUE_SYNC
E assert (1 == 1 and 1 == 0)
E + where 1 = len([{'count': 1}])
E + and 1 = {'count': 1}.count
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff
------ [7/36] chunk ran 2 tests (total:387.50s - test:387.45s)
[fail] test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (279.01s)
ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync)
ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml)
ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml)
ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert (
E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Date_1_UNIQUE_SYNC
E assert (1 == 1 and 1 == 0)
E + where 1 = len([{'count': 1}])
E + and 1 = {'count': 1}.count
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff
------ FAIL: 46 - GOOD, 2 - FAIL ydb/tests/datashard/ttl
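Both TTL failures above read a count of 1 where 0 rows were expected, the usual symptom of checking the result of an asynchronous erase (TTL expiry or a just-issued DELETE) exactly once. A deadline-based poll keeps the same final assertion but gives the background erase time to land. The helper below is a generic sketch; fetch_count is a placeholder callable, not the suite's create_select helper, and the choice of polling at all is an assumption about the cause.

    # Hedged sketch: poll until the row count reaches the expected value or a deadline
    # passes, then assert once. `fetch_count` is any zero-argument callable returning the
    # current row count (placeholder for the suite's own SELECT helper).
    import time
    from typing import Callable

    def wait_for_row_count(fetch_count: Callable[[], int], expected: int,
                           timeout_s: float = 60.0, poll_s: float = 2.0) -> None:
        deadline = time.monotonic() + timeout_s
        count = fetch_count()
        while count != expected and time.monotonic() < deadline:
            time.sleep(poll_s)
            count = fetch_count()
        assert count == expected, f"expected {expected} rows, still seeing {count} after {timeout_s}s"

    # Usage sketch (table name taken from the log above, query helper is hypothetical):
    # wait_for_row_count(lambda: run_count_query("table_Datetime_1_UNIQUE_SYNC"), expected=0)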
Total 354 suites:
323 - GOOD
23 - FAIL
8 - TIMEOUT
Total 9483 tests:
9244 - GOOD
139 - FAIL
80 - NOT_LAUNCHED
11 - TIMEOUT
7 - SKIPPED
2 - CRASHED
Cache efficiency ratio is 89.33% (44822 of 50177). Local: 0 (0.00%), dist: 10016 (19.96%), by dynamic uids: 0 (0.00%), avoided: 34806 (69.37%)
Dist cache download: count=4677, size=17.14 GiB, speed=162.77 MiB/s
Disk usage for tools/sdk at least 56.35 MiB
Additional disk space consumed for build cache 1.05 TiB
Critical path:
[ 71929 ms] [CC] [ntq7Vu7C3_dDM668H7ggxQ default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp [started: 0 (1768046076314), finished: 71929 (1768046148243)]
[ 309 ms] [AR] [_lvHIdNsmzEc0NIJsVagWA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a [started: 72033 (1768046148347), finished: 72342 (1768046148656)]
[ 94038 ms] [LD] [6PWldXWVPUo7QTD4C6MI2A default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 245261 (1768046321575), finished: 339299 (1768046415613)]
[642532 ms] [TM] [test-6179208328368260590 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 3146820 (1768049223134), finished: 3789352 (1768049865666)]
[ 31901 ms] [TA] [test-6800863756323587451]: $(BUILD_ROOT)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} [started: 3789477 (1768049865791), finished: 3821378 (1768049897692)]
Time from start: 4390763 ms, time elapsed by graph 840709 ms, time diff 3550054 ms.
The longest 10 tasks:
[642532 ms] [TM] [test-6179208328368260590 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1768049223134, finished: 1768049865666]
[632426 ms] [TM] [test-12101741546369679276 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1768049201895, finished: 1768049834321]
[631177 ms] [TM] [test-2037793465808271995 asan default-linux-x86_64 release]: ydb/core/cms/ut_sentinel_unstable/unittest [started: 1768046221324, finished: 1768046852501]
[627656 ms] [TM] [test-11047368475442564516 asan default-linux-x86_64 release]: ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest [started: 1768046263926, finished: 1768046891582]
[614767 ms] [TM] [test-6757000280658226552 asan default-linux-x86_64 release]: ydb/core/tx/datashard/build_index/ut/unittest [started: 1768046306431, finished: 1768046921198]
[607679 ms] [TM] [test-4871023813394682894 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1768047698096, finished: 1768048305775]
[607334 ms] [TM] [test-1517513937399730543 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1768047835238, finished: 1768048442572]
[606120 ms] [TM] [test-13491190358138893179 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1768046891718, finished: 1768047497838]
[605963 ms] [TM] [test-11683332027236015763 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1768047837503, finished: 1768048443466]
[605153 ms] [TM] [test-10628259468471780305 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1768047838026, finished: 1768048443179]
Total time by type:
[163263385 ms] [TM] [count: 4467, ave time 36548.78 msec]
[ 13799048 ms] [LD] [count: 453, ave time 30461.47 msec]
[ 10338557 ms] [prepare:get from dist cache] [count: 10016, ave time 1032.20 msec]
[ 1779520 ms] [TS] [count: 180, ave time 9886.22 msec]
[ 724237 ms] [prepare:bazel-store] [count: 3, ave time 241412.33 msec]
[ 527136 ms] [prepare:put to dist cache] [count: 5267, ave time 100.08 msec]
[ 501711 ms] [TA] [count: 231, ave time 2171.91 msec]
[ 386014 ms] [prepare:tools] [count: 22, ave time 17546.09 msec]
[ 301166 ms] [prepare:put into local cache, clean build dir] [count: 10031, ave time 30.02 msec]
[ 135982 ms] [prepare:AC] [count: 4, ave time 33995.50 msec]
[ 75927 ms] [CC] [count: 6, ave time 12654.50 msec]
[ 7627 ms] [AR] [count: 9, ave time 847.44 msec]
[ 958 ms] [CF] [count: 2, ave time 479.00 msec]
[ 920 ms] [prepare:resources] [count: 1, ave time 920.00 msec]
[ 913 ms] [UN] [count: 2, ave time 456.50 msec]
[ 781 ms] [CP] [count: 2, ave time 390.50 msec]
[ 600 ms] [SB] [count: 1, ave time 600.00 msec]
[ 585 ms] [BI] [count: 1, ave time 585.00 msec]
[ 135 ms] [PK] [count: 1, ave time 135.00 msec]
[ 33 ms] [prepare:clean] [count: 3, ave time 11.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 165544616 ms (92.26%)
Total run tasks time - 179431190 ms
Configure time - 5.3 s
Statistics overhead 2310 ms
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0